1 | # |
---|
2 | # gen_base.py -- infrastructure for generating makefiles, dependencies, etc. |
---|
3 | # |
---|
4 | |
---|
5 | import os |
---|
6 | import sys |
---|
7 | import glob |
---|
8 | import re |
---|
9 | import fileinput |
---|
10 | try: |
---|
11 | # Python >=3.0 |
---|
12 | import configparser |
---|
13 | except ImportError: |
---|
14 | # Python <3.0 |
---|
15 | import ConfigParser as configparser |
---|
16 | import generator.swig |
---|
17 | |
---|
18 | import getversion |
---|
19 | |
---|
20 | |
---|
21 | def _warning(msg): |
---|
22 | sys.stderr.write("WARNING: %s\n" % msg) |
---|
23 | |
---|
24 | def _error(msg): |
---|
25 | sys.stderr.write("ERROR: %s\n" % msg) |
---|
26 | sys.exit(1) |
---|
27 | |
---|
class GeneratorBase:
  """Parse build.conf and construct the dependency graph of build targets.

  Derived classes should define a class attribute named _extension_map.
  This attribute should be a dictionary of the form:
      { (target-type, file-type): file-extension ...}

  where: target-type is 'exe', 'lib', ...
         file-type is 'target', 'object', ...
  """

  def __init__(self, fname, verfname, options=None):
    """Read the config file FNAME and the version header VERFNAME.

    OPTIONS is an iterable of (opt, value) pairs (getopt style); only
    '--release' is recognized here.

    Raises GenError if the version cannot be extracted or a section has
    an unknown build type.
    """
    # Retrieve major version from the C header, to avoid duplicating it in
    # build.conf - it is required because some file names include it.
    try:
      vsn_parser = getversion.Parser()
      vsn_parser.search('SVN_VER_MAJOR', 'libver')
      self.version = vsn_parser.parse(verfname).libver
    except Exception:
      # Was a bare 'except:', which would also have swallowed SystemExit
      # and KeyboardInterrupt; only genuine errors should become GenError.
      raise GenError('Unable to extract version.')

    # Read options
    self.release_mode = None
    for opt, val in options:
      if opt == '--release':
        self.release_mode = 1

    # Now read and parse build.conf
    parser = configparser.ConfigParser()
    parser.read(fname)

    # Remember the config path in build-path (forward-slash) form.
    self.conf = build_path(os.path.abspath(fname))

    self.sections = { }   # section name -> Section instance
    self.graph = DependencyGraph()

    # Allow derived classes to suppress certain configuration sections
    if not hasattr(self, 'skip_sections'):
      self.skip_sections = { }

    # The 'options' section does not represent a build target,
    # it simply contains global options
    self.skip_sections['options'] = None

    # Read in the global options
    self.includes = \
        _collect_paths(parser.get('options', 'includes'))
    self.private_includes = \
        _collect_paths(parser.get('options', 'private-includes'))
    self.private_built_includes = \
        parser.get('options', 'private-built-includes').split()
    self.scripts = \
        _collect_paths(parser.get('options', 'test-scripts'))
    self.bdb_scripts = \
        _collect_paths(parser.get('options', 'bdb-test-scripts'))

    self.include_wildcards = \
        parser.get('options', 'include-wildcards').split()
    self.swig_lang = parser.get('options', 'swig-languages').split()
    self.swig_dirs = parser.get('options', 'swig-dirs').split()

    # SWIG Generator
    self.swig = generator.swig.Generator(self.conf, "swig")

    # Visual C++ projects - contents are either TargetProject instances,
    # or other targets with an external-project attribute.
    self.projects = []

    # Lists of pathnames of various kinds
    self.test_deps = []      # Non-BDB dependent items to build for the tests
    self.test_progs = []     # Subset of the above to actually execute
    self.bdb_test_deps = []  # BDB-dependent items to build for the tests
    self.bdb_test_progs = [] # Subset of the above to actually execute
    self.target_dirs = []    # Directories in which files are built
    self.manpages = []       # Manpages

    # Collect the build targets and have a reproducible ordering
    parser_sections = sorted(parser.sections())
    for section_name in parser_sections:
      if section_name in self.skip_sections:
        continue

      # 'opts' (not 'options') so the constructor parameter isn't shadowed.
      opts = {}
      for option in parser.options(section_name):
        opts[option] = parser.get(section_name, option)

      # 'target_type' (not 'type') so the builtin isn't shadowed.
      target_type = opts.get('type')

      target_class = _build_types.get(target_type)
      if not target_class:
        raise GenError('ERROR: unknown build type for ' + section_name)

      section = target_class.Section(target_class, section_name, opts, self)

      self.sections[section_name] = section

      section.create_targets()

    # Compute intra-library dependencies
    for section in self.sections.values():
      dependencies = (( DT_LINK, section.options.get('libs', "") ),
                      ( DT_NONLIB, section.options.get('nonlibs', "") ))

      for dep_type, dep_names in dependencies:
        # Translate string names to Section objects
        dep_section_objects = []
        for section_name in dep_names.split():
          if section_name in self.sections:
            dep_section_objects.append(self.sections[section_name])

        # For each dep_section that this section declares a dependency on,
        # take the targets of this section, and register a dependency on
        # any 'matching' targets of the dep_section.
        #
        # At the moment, the concept of multiple targets per section is
        # employed only for the SWIG modules, which have 1 target
        # per language. Then, 'matching' means being of the same language.
        for dep_section in dep_section_objects:
          for target in section.get_targets():
            self.graph.bulk_add(dep_type, target.name,
                                dep_section.get_dep_targets(target))

  def compute_hdrs(self):
    """Get a list of the header files (native paths)."""
    all_includes = list(map(native_path, self.includes + self.private_includes))
    # Also pick up headers produced into any of the build directories.
    for d in unique(self.target_dirs):
      for wildcard in self.include_wildcards:
        hdrs = glob.glob(os.path.join(native_path(d), wildcard))
        all_includes.extend(hdrs)
    return all_includes

  def compute_hdr_deps(self):
    """Compute the dependencies of each header file and record them
    in self.graph."""

    include_deps = IncludeDependencyInfo(self.compute_hdrs(),
        list(map(native_path, self.private_built_includes)))

    for objectfile, sources in self.graph.get_deps(DT_OBJECT):
      assert len(sources) == 1
      source = sources[0]

      # Generated .c files must depend on all headers their parent .i file
      # includes
      if isinstance(objectfile, SWIGObject):
        swigsources = self.graph.get_sources(DT_SWIG_C, source)
        assert len(swigsources) == 1
        ifile = swigsources[0]
        assert isinstance(ifile, SWIGSource)

        c_includes, swig_includes = \
            include_deps.query_swig(native_path(ifile.filename))
        for include_file in c_includes:
          self.graph.add(DT_OBJECT, objectfile, build_path(include_file))
        for include_file in swig_includes:
          self.graph.add(DT_SWIG_C, source, build_path(include_file))

      # Any non-swig C/C++ object must depend on the headers its parent
      # .c or .cpp includes. Note that 'object' includes gettext .mo files,
      # Java .class files, and .h files generated from Java classes, so
      # we must filter here.
      elif isinstance(source, SourceFile) and \
          os.path.splitext(source.filename)[1] in ('.c', '.cpp'):
        for include_file in include_deps.query(native_path(source.filename)):
          self.graph.add(DT_OBJECT, objectfile, build_path(include_file))
---|
192 | |
---|
193 | |
---|
class DependencyGraph:
  """Record dependencies between build items.

  See the DT_* values for the different dependency types. For each type,
  the target and source objects recorded will be different. They could
  be file names, Target objects, install types, etc.
  """

  def __init__(self):
    # One { target -> [ source ... ] } mapping per dependency type.
    self.deps = { }
    for dt in dep_types:
      self.deps[dt] = { }

  def add(self, type, target, source):
    """Record that TARGET depends on SOURCE (for dependency kind TYPE)."""
    # setdefault creates the list on first use, then we append either way.
    self.deps[type].setdefault(target, [ ]).append(source)

  def bulk_add(self, type, target, sources):
    """Record that TARGET depends on every item in SOURCES."""
    # When the target is new, setdefault stores a fresh list, so the
    # caller's SOURCES list is never aliased.
    self.deps[type].setdefault(target, [ ]).extend(sources)

  def get_sources(self, type, target, cls=None):
    """Return TARGET's sources of kind TYPE, optionally filtered by CLS."""
    sources = self.deps[type].get(target, [ ])
    if not cls:
      return sources
    return [src for src in sources if isinstance(src, cls)]

  def get_all_sources(self, type):
    """Return every source of kind TYPE, across all targets."""
    return [src
            for group in self.deps[type].values()
            for src in group]

  def get_deps(self, type):
    """Return (target, sources) pairs for dependency kind TYPE."""
    return list(self.deps[type].items())
---|
237 | |
---|
# dependency types: each names one kind of edge stored in DependencyGraph
dep_types = [
  'DT_INSTALL',  # install areas. e.g. 'lib', 'base-lib'
  'DT_OBJECT',   # an object filename, depending upon .c filenames
  'DT_SWIG_C',   # a swig-generated .c file, depending upon .i filename(s)
  'DT_LINK',     # a libtool-linked filename, depending upon object fnames
  'DT_NONLIB',   # filename depends on object fnames, but isn't linked to them
  ]

# create some variables for these
for _dt in dep_types:
  # e.g. DT_INSTALL = 'DT_INSTALL'
  # (each constant's value is simply its own name)
  globals()[_dt] = _dt
---|
251 | |
---|
class DependencyNode:
  """Base class for nodes in the dependency graph; wraps a file name."""

  def __init__(self, filename):
    self.filename = filename

  def __str__(self):
    # A node displays as its underlying file name.
    return self.filename
---|
258 | |
---|
class ObjectFile(DependencyNode):
  """A compiled object file, together with the command used to build it."""

  def __init__(self, filename, compile_cmd = None):
    DependencyNode.__init__(self, filename)
    self.compile_cmd = compile_cmd
    # 0/1 flag; set to 1 by creators whose source is itself generated
    self.source_generated = 0
---|
264 | |
---|
class SWIGObject(ObjectFile):
  """An object file compiled from a SWIG-generated wrapper for LANG."""

  def __init__(self, filename, lang):
    ObjectFile.__init__(self, filename)
    abbrev = lang_abbrev[lang]
    self.lang = lang
    self.lang_abbrev = abbrev
    # The wrapper .c source is always generated, never checked in.
    self.source_generated = 1
    ### hmm. this is Makefile-specific
    self.compile_cmd = '$(COMPILE_%s_WRAPPER)' % abbrev.upper()
---|
273 | |
---|
class HeaderFile(DependencyNode):
  """A header file; classname/compile_cmd are set for generated headers.

  classname is presumably the fully-qualified Java class the header was
  generated from (see TargetJavaHeaders, which passes package + class).
  """

  def __init__(self, filename, classname = None, compile_cmd = None):
    DependencyNode.__init__(self, filename)
    self.compile_cmd = compile_cmd
    self.classname = classname
---|
279 | |
---|
class SourceFile(DependencyNode):
  """A source file, remembering the directory it lives in (reldir)."""

  def __init__(self, filename, reldir):
    DependencyNode.__init__(self, filename)
    self.reldir = reldir
---|
284 | |
---|
class SWIGSource(SourceFile):
  """A SWIG .i interface file; its reldir is derived from its own path."""

  def __init__(self, filename):
    SourceFile.__init__(self, filename, build_path_dirname(filename))
    # (removed a dead trailing 'pass' statement that followed the call)
---|
289 | |
---|
# Short abbreviation for each supported binding language; used in target
# names, install-area names, and Makefile macro names (e.g. COMPILE_PY_...).
lang_abbrev = {
  'python' : 'py',
  'perl' : 'pl',
  'ruby' : 'rb',
  }

# Human-readable language name; used when building target descriptions.
lang_full_name = {
  'python' : 'Python',
  'perl' : 'Perl',
  'ruby' : 'Ruby',
  }

# Suffix used to form each language's support-library name.
lang_utillib_suffix = {
  'python' : 'py',
  'perl' : 'perl',
  'ruby' : 'ruby',
  }
---|
307 | |
---|
class Target(DependencyNode):
  "A build target is a node in our dependency graph."

  def __init__(self, name, options, gen_obj):
    # NOTE: DependencyNode.__init__ is intentionally not invoked;
    # subclasses compute and assign self.filename themselves.
    self.name = name
    self.gen_obj = gen_obj
    get = options.get
    self.desc = get('description')
    self.path = get('path', '')
    self.add_deps = get('add-deps', '')
    self.add_install_deps = get('add-install-deps', '')
    self.msvc_name = get('msvc-name')  # optional override for the project name

  def add_dependencies(self):
    # subclasses should override to provide behavior, as appropriate
    raise NotImplementedError
---|
323 | |
---|
class Section:
  """One section of build.conf.

  Acts as a small factory: it creates and tracks the Target instance(s)
  belonging to its configuration section. The base implementation manages
  exactly one Target; subclasses may manage several.
  """

  def __init__(self, target_class, name, options, gen_obj):
    self.target_class = target_class
    self.name = name
    self.options = options
    self.gen_obj = gen_obj

  def create_targets(self):
    """Instantiate our Target and let it register its dependencies."""
    self.target = self.target_class(self.name, self.options, self.gen_obj)
    self.target.add_dependencies()

  def get_targets(self):
    """Return the list of Target instances owned by this section."""
    return [self.target]

  def get_dep_targets(self, target):
    """Return the targets here that TARGET (from another section) depends on."""
    return [self.target]
---|
351 | |
---|
class TargetLinked(Target):
  "The target is linked (by libtool) against other libraries."

  def __init__(self, name, options, gen_obj):
    Target.__init__(self, name, options, gen_obj)
    # install area name (e.g. 'lib', 'test'); read by add_dependencies
    self.install = options.get('install')
    self.compile_cmd = options.get('compile-cmd')
    self.sources = options.get('sources', '*.c *.cpp')
    self.link_cmd = options.get('link-cmd', '$(LINK)')

    # Set when the library/project is built outside this generator;
    # such targets get no object/install edges (see add_dependencies).
    self.external_lib = options.get('external-lib')
    self.external_project = options.get('external-project')
    self.msvc_libs = options.get('msvc-libs', '').split()

  def add_dependencies(self):
    # Register this target's objects, sources and install area in the
    # dependency graph.  NOTE: self.objext is supplied by subclasses
    # (e.g. TargetExe, TargetLib) before this method runs.
    if self.external_lib or self.external_project:
      if self.external_project:
        self.gen_obj.projects.append(self)
      return

    # the specified install area depends upon this target
    self.gen_obj.graph.add(DT_INSTALL, self.install, self)

    # NOTE(review): "self.sources or '*.c' or '*.cpp'" always falls back
    # to '*.c' alone ('*.c' is truthy, so "or '*.cpp'" is unreachable);
    # the intent was probably '*.c *.cpp'.  Since __init__ defaults
    # 'sources' to '*.c *.cpp', the fallback appears dead in practice --
    # confirm before changing.
    sources = sorted(_collect_paths(self.sources or '*.c' or '*.cpp', self.path))

    for srcs, reldir in sources:
      for src in srcs.split(" "):
        # only create object nodes for source files that actually exist
        if glob.glob(src):
          if src[-2:] == '.c':
            objname = src[:-2] + self.objext
          elif src[-4:] == '.cpp':
            objname = src[:-4] + self.objext
          else:
            raise GenError('ERROR: unknown file extension on ' + src)

          ofile = ObjectFile(objname, self.compile_cmd)

          # object depends upon source
          self.gen_obj.graph.add(DT_OBJECT, ofile, SourceFile(src, reldir))

          # target (a linked item) depends upon object
          self.gen_obj.graph.add(DT_LINK, self.name, ofile)

    # collect all the paths where stuff might get built
    ### we should collect this from the dependency nodes rather than
    ### the sources. "what dir are you going to put yourself into?"
    self.gen_obj.target_dirs.append(self.path)
    for pattern in self.sources.split():
      dirname = build_path_dirname(pattern)
      if dirname:
        self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))
---|
403 | |
---|
class TargetExe(TargetLinked):
  """An executable program target."""

  def __init__(self, name, options, gen_obj):
    TargetLinked.__init__(self, name, options, gen_obj)

    if not (self.external_lib or self.external_project):
      extmap = self.gen_obj._extension_map
      self.objext = extmap['exe', 'object']
      self.filename = build_path_join(self.path,
                                      name + extmap['exe', 'target'])

    self.testing = options.get('testing')
    self.manpages = options.get('manpages', '')

  def add_dependencies(self):
    TargetLinked.add_dependencies(self)

    # Register this program with the test machinery, if applicable.
    runnable = self.testing != 'skip'
    if self.install == 'test':
      self.gen_obj.test_deps.append(self.filename)
      if runnable:
        self.gen_obj.test_progs.append(self.filename)
    elif self.install == 'bdb-test':
      self.gen_obj.bdb_test_deps.append(self.filename)
      if runnable:
        self.gen_obj.bdb_test_progs.append(self.filename)

    self.gen_obj.manpages.extend(self.manpages.split())
---|
430 | |
---|
class TargetScript(Target):
  """A script target: nothing to compile, only something to install."""

  def __init__(self, name, options, gen_obj):
    Target.__init__(self, name, options, gen_obj)
    # Target.__init__ (as defined in this file) does not set 'install',
    # but add_dependencies below reads it; read it from the options here
    # so the attribute always exists (matches how TargetLinked and
    # TargetI18N obtain it).
    self.install = options.get('install')

  def add_dependencies(self):
    # we don't need to "compile" the sources, so there are no dependencies
    # to add here, except to get the script installed in the proper area.
    # note that the script might itself be generated, but that isn't a
    # concern here.
    self.gen_obj.graph.add(DT_INSTALL, self.install, self)
---|
438 | |
---|
class TargetLib(TargetLinked):
  """A library target (shared, unless msvc-static says otherwise)."""

  def __init__(self, name, options, gen_obj):
    TargetLinked.__init__(self, name, options, gen_obj)

    if not (self.external_lib or self.external_project):
      extmap = gen_obj._extension_map
      self.objext = extmap['lib', 'object']

      # The target file combines name, version and platform extension.
      libfile = '%s-%s%s' % (name, gen_obj.version, extmap['lib', 'target'])
      self.filename = build_path_join(self.path, libfile)

    yes = lambda key: options.get(key) == 'yes'
    # Library references symbols that remain undefined at link time.
    self.undefined_lib_symbols = yes('undefined-lib-symbols')
    self.msvc_static = yes('msvc-static')  # build as a static lib
    self.msvc_fake = yes('msvc-fake')      # has a fake target
    self.msvc_export = options.get('msvc-export', '').split()
---|
457 | |
---|
class TargetApacheMod(TargetLib):
  """A module loaded into Apache httpd."""

  def __init__(self, name, options, gen_obj):
    TargetLib.__init__(self, name, options, gen_obj)

    # Unlike plain libs, the module file carries no version in its name.
    extension = self.gen_obj._extension_map['lib', 'target']
    self.filename = build_path_join(self.path, name + extension)

    # we have a custom linking rule
    ### hmm. this is Makefile-specific
    self.compile_cmd = '$(COMPILE_APACHE_MOD)'
    self.link_cmd = '$(LINK_APACHE_MOD)'
---|
470 | |
---|
class TargetRaModule(TargetLib):
  # A repository-access (RA) module; built exactly like a plain library.
  pass
---|
473 | |
---|
class TargetFsModule(TargetLib):
  # A filesystem (FS) back-end module; built exactly like a plain library.
  pass
---|
476 | |
---|
class TargetDoc(Target):
  # Documentation target; no build behavior of its own is defined here.
  pass
---|
479 | |
---|
class TargetI18N(Target):
  "The target is a collection of .po files to be compiled by msgfmt."

  def __init__(self, name, options, gen_obj):
    Target.__init__(self, name, options, gen_obj)
    self.install = options.get('install')
    self.sources = options.get('sources')
    # Let the Makefile determine this via .SUFFIXES
    self.compile_cmd = None
    self.objext = '.mo'
    self.external_project = options.get('external-project')

  def add_dependencies(self):
    self.gen_obj.graph.add(DT_INSTALL, self.install, self)

    for src, reldir in sorted(_collect_paths(self.sources or '*.po',
                                             self.path)):
      # every source must be a .po file; its object is the matching .mo
      if not src.endswith('.po'):
        raise GenError('ERROR: unknown file extension on ' + src)
      mofile = ObjectFile(src[:-3] + self.objext, self.compile_cmd)

      # object depends upon source
      self.gen_obj.graph.add(DT_OBJECT, mofile, SourceFile(src, reldir))

      # target depends upon object
      self.gen_obj.graph.add(DT_LINK, self.name, mofile)

    # Add us to the list of target dirs, so we're created in mkdir-init.
    self.gen_obj.target_dirs.append(self.path)
---|
513 | |
---|
class TargetSWIG(TargetLib):
  """The SWIG bindings for one language; one Target per language."""

  def __init__(self, name, options, gen_obj, lang):
    TargetLib.__init__(self, name, options, gen_obj)
    self.lang = lang
    self.desc = self.desc + ' for ' + lang_full_name[lang]
    self.include_runtime = options.get('include-runtime') == 'yes'

    ### hmm. this is Makefile-specific
    self.link_cmd = '$(LINK_%s_WRAPPER)' % lang_abbrev[lang].upper()

  def add_dependencies(self):
    """Derive the wrapper .c, object and library nodes from the .i file
    and register the resulting edges in the dependency graph."""
    # Look in source directory for dependencies
    self.gen_obj.target_dirs.append(self.path)

    sources = _collect_paths(self.sources, self.path)
    assert len(sources) == 1  ### simple assertions for now

    # get path to SWIG .i file
    ipath = sources[0][0]
    iname = build_path_basename(ipath)

    assert iname[-2:] == '.i'
    cname = iname[:-2] + '.c'
    oname = iname[:-2] + self.gen_obj._extension_map['lib', 'object']

    # Extract SWIG module name from .i file name: strip the extension and
    # any leading 'svn_' prefix.  (Replaces the fragile "cond and a or b"
    # idiom with an explicit conditional; behavior is unchanged.)
    if iname[:4] == 'svn_':
      module_name = iname[4:-2]
    else:
      module_name = iname[:-2]

    lib_extension = self.gen_obj._extension_map['lib', 'target']
    if self.lang == "ruby":
      lib_filename = module_name + lib_extension
    elif self.lang == "perl":
      lib_filename = '_' + module_name.capitalize() + lib_extension
    else:
      lib_filename = '_' + module_name + lib_extension

    # Rewrite our identity now that the language/module are known.
    self.name = self.lang + '_' + module_name
    self.path = build_path_join(self.path, self.lang)
    if self.lang == "perl":
      self.path = build_path_join(self.path, "native")
    self.filename = build_path_join(self.path, lib_filename)

    ifile = SWIGSource(ipath)
    cfile = SWIGObject(build_path_join(self.path, cname), self.lang)
    ofile = SWIGObject(build_path_join(self.path, oname), self.lang)

    # the .c file depends upon the .i file
    self.gen_obj.graph.add(DT_SWIG_C, cfile, ifile)

    # the object depends upon the .c file
    self.gen_obj.graph.add(DT_OBJECT, ofile, cfile)

    # the library depends upon the object
    self.gen_obj.graph.add(DT_LINK, self.name, ofile)

    # the specified install area depends upon the library
    self.gen_obj.graph.add(DT_INSTALL, 'swig-' + lang_abbrev[self.lang], self)

  class Section(TargetLib.Section):
    """A build.conf section that creates one target per SWIG language."""

    def create_targets(self):
      self.targets = { }
      for lang in self.gen_obj.swig_lang:
        target = self.target_class(self.name, self.options, self.gen_obj, lang)
        target.add_dependencies()
        self.targets[lang] = target

    def get_targets(self):
      return list(self.targets.values())

    def get_dep_targets(self, target):
      # 'Matching' means being of the same language; return that target
      # if we built one for it.  (Explicit conditional instead of the
      # fragile "x and [x] or []" idiom; behavior is unchanged.)
      match = self.targets.get(target.lang, None)
      if match:
        return [match]
      return [ ]
---|
586 | |
---|
class TargetSWIGLib(TargetLib):
  """A support library used by the SWIG bindings of one language."""

  def __init__(self, name, options, gen_obj):
    TargetLib.__init__(self, name, options, gen_obj)
    self.lang = options.get('lang')

  class Section(TargetLib.Section):
    def get_dep_targets(self, target):
      # Only targets of the same binding language depend on this library.
      if target.lang != self.target.lang:
        return [ ]
      return [ self.target ]
---|
597 | |
---|
class TargetProject(Target):
  """A target built by an external command (a Visual C++ project)."""

  def __init__(self, name, options, gen_obj):
    Target.__init__(self, name, options, gen_obj)
    get = options.get
    self.cmd = get('cmd')
    self.release = get('release')
    self.debug = get('debug')

  def add_dependencies(self):
    # Built externally; just record ourselves with the generator.
    self.gen_obj.projects.append(self)
---|
607 | |
---|
class TargetSWIGProject(TargetProject):
  """An external project that builds SWIG bindings for one language."""

  def __init__(self, name, options, gen_obj):
    TargetProject.__init__(self, name, options, gen_obj)
    self.lang = options.get('lang')
---|
612 | |
---|
class TargetJava(TargetLinked):
  """Common base for Java targets; accumulates per-target dep nodes."""

  def __init__(self, name, options, gen_obj):
    TargetLinked.__init__(self, name, options, gen_obj)
    self.deps = [ ]  # dependency nodes collected by subclasses
    self.link_cmd = options.get('link-cmd')
    self.jar = options.get('jar')
    self.packages = options.get('package-roots', '').split()
---|
620 | |
---|
class TargetJavaHeaders(TargetJava):
  """Generate C header files from compiled Java classes (javah-style --
  confirm against the Makefile rules that consume compile_cmd)."""

  def __init__(self, name, options, gen_obj):
    TargetJava.__init__(self, name, options, gen_obj)
    self.objext = '.class'
    self.javah_objext = '.h'
    self.headers = options.get('headers')  # output dir for the .h files
    self.classes = options.get('classes')  # dir holding the .class files
    self.package = options.get('package')  # dot-separated java package
    self.output_dir = self.headers

  def add_dependencies(self):
    sources = _collect_paths(self.sources, self.path)

    for src, reldir in sources:
      if src[-5:] != '.java':
        raise GenError('ERROR: unknown file extension on ' + src)

      class_name = build_path_basename(src[:-5])

      class_header = build_path_join(self.headers, class_name + '.h')
      # Alternative header name built from the fully-qualified class with
      # dots replaced by underscores; stored as filename_win below.
      class_header_win = build_path_join(self.headers,
                                         self.package.replace(".", "_")
                                         + "_" + class_name + '.h')
      # package "a.b.c" -> build path "a/b/c" under the classes dir
      class_pkg_list = self.package.split('.')
      class_pkg = build_path_join(*class_pkg_list)
      class_file = ObjectFile(build_path_join(self.classes, class_pkg,
                                              class_name + self.objext))
      class_file.source_generated = 1
      class_file.class_name = class_name
      hfile = HeaderFile(class_header, self.package + '.' + class_name,
                         self.compile_cmd)
      hfile.filename_win = class_header_win
      hfile.source_generated = 1
      # the generated header depends upon the compiled class file
      self.gen_obj.graph.add(DT_OBJECT, hfile, class_file)
      self.deps.append(hfile)

      # target (a linked item) depends upon object
      self.gen_obj.graph.add(DT_LINK, self.name, hfile)


    # collect all the paths where stuff might get built
    ### we should collect this from the dependency nodes rather than
    ### the sources. "what dir are you going to put yourself into?"
    self.gen_obj.target_dirs.append(self.path)
    self.gen_obj.target_dirs.append(self.classes)
    self.gen_obj.target_dirs.append(self.headers)
    for pattern in self.sources.split():
      dirname = build_path_dirname(pattern)
      if dirname:
        self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))

    self.gen_obj.graph.add(DT_INSTALL, self.name, self)
---|
673 | |
---|
class TargetJavaClasses(TargetJava):
  """Compile .java sources into .class files under self.classes."""

  def __init__(self, name, options, gen_obj):
    TargetJava.__init__(self, name, options, gen_obj)
    self.objext = '.class'
    self.lang = 'java'
    self.classes = options.get('classes')  # output dir for .class files
    self.output_dir = self.classes

  def add_dependencies(self):
    sources = _collect_paths(self.sources, self.path)

    for src, reldir in sources:
      if src[-5:] == '.java':
        objname = src[:-5] + self.objext

        # As .class files are likely not generated into the same
        # directory as the source files, the object path may need
        # adjustment. To this effect, take "target_ob.classes" into
        # account.
        dirs = build_path_split(objname)
        sourcedirs = dirs[:-1]  # Last element is the .class file name.
        # Pop path components from the right until one of the configured
        # package roots is found; what remains is the source prefix, and
        # the components below the root form the class's package path.
        while sourcedirs:
          if sourcedirs.pop() in self.packages:
            sourcepath = build_path_join(*sourcedirs)
            objname = build_path_join(self.classes, *dirs[len(sourcedirs):])
            break
        else:
          # while/else: the loop exhausted without 'break', i.e. no
          # package root was found anywhere in the path
          raise GenError('Unable to find Java package root in path "%s"' % objname)
      else:
        raise GenError('ERROR: unknown file extension on "' + src + '"')

      ofile = ObjectFile(objname, self.compile_cmd)
      sfile = SourceFile(src, reldir)
      sfile.sourcepath = sourcepath

      # object depends upon source
      self.gen_obj.graph.add(DT_OBJECT, ofile, sfile)

      # target (a linked item) depends upon object
      self.gen_obj.graph.add(DT_LINK, self.name, ofile)

      # Add the class file to the dependency tree for this target
      self.deps.append(ofile)

    # collect all the paths where stuff might get built
    ### we should collect this from the dependency nodes rather than
    ### the sources. "what dir are you going to put yourself into?"
    self.gen_obj.target_dirs.append(self.path)
    self.gen_obj.target_dirs.append(self.classes)
    for pattern in self.sources.split():
      dirname = build_path_dirname(pattern)
      if dirname:
        self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))

    self.gen_obj.graph.add(DT_INSTALL, self.name, self)
---|
729 | |
---|
730 | |
---|
# Map the 'type' option of a build.conf section to the Target subclass
# that implements that kind of target (see GeneratorBase.__init__).
_build_types = {
  'exe' : TargetExe,
  'script' : TargetScript,
  'lib' : TargetLib,
  'doc' : TargetDoc,
  'swig' : TargetSWIG,
  'project' : TargetProject,
  'swig_lib' : TargetSWIGLib,
  'swig_project' : TargetSWIGProject,
  'ra-module': TargetRaModule,
  'fs-module': TargetFsModule,
  'apache-mod': TargetApacheMod,
  'javah' : TargetJavaHeaders,
  'java' : TargetJavaClasses,
  'i18n' : TargetI18N,
  }
---|
747 | |
---|
748 | |
---|
class GenError(Exception):
  """Error raised when the generator cannot process its input."""
---|
751 | |
---|
752 | |
---|
753 | # Path Handling Functions |
---|
754 | # |
---|
755 | # Build paths specified in build.conf are assumed to be always separated |
---|
756 | # by forward slashes, regardless of the current running os. |
---|
757 | # |
---|
758 | # Native paths are paths separated by os.sep. |
---|
759 | |
---|
def native_path(path):
  """Convert a forward-slash build path to a native (os.sep) path."""
  return os.sep.join(path.split('/'))
---|
763 | |
---|
def build_path(path):
  """Convert a native path to a forward-slash build path."""
  # Fold both the primary and (if any) alternate native separator to '/'.
  for sep in (os.sep, os.altsep):
    if sep:
      path = path.replace(sep, '/')
  return path
---|
770 | |
---|
def build_path_join(*path_parts):
  """Assemble the given components into a single build path."""
  return '/'.join(part for part in path_parts)
---|
774 | |
---|
def build_path_split(path):
  """Break a build path into the list of its slash-separated components."""
  return path.split('/')
---|
778 | |
---|
def build_path_splitfile(path):
  """Split a build path into its (directory, filename) portions.

  A path with no slash yields an empty directory portion; a file directly
  under the root keeps the leading slash as its directory portion.
  """
  slash = path.rfind('/')
  if slash < 0:
    return "", path
  if slash == 0:
    return path[0], path[1:]
  return path[:slash], path[slash + 1:]
---|
788 | |
---|
def build_path_dirname(path):
  """Return the directory portion of a file path."""
  # Inline of build_path_splitfile(path)[0]: everything before the last
  # slash, with the special cases of a leading slash and no slash at all.
  slash = path.rfind('/')
  if slash > 0:
    return path[:slash]
  elif slash == 0:
    return path[0]
  return ""
---|
792 | |
---|
def build_path_basename(path):
  """Return the filename portion of a file path."""
  # rfind returns -1 when no slash exists, so +1 yields the whole string.
  return path[path.rfind('/') + 1:]
---|
796 | |
---|
def build_path_retreat(path):
  """Given a relative directory, return ../ paths to retreat to the origin."""
  # One ".." per path component: the component count is slashes + 1.
  return '/'.join(['..'] * (path.count('/') + 1))
---|
800 | |
---|
def build_path_strip(path, files):
  """Return FILES with the leading directory PATH stripped.

  Each entry that lies under PATH (i.e. starts with PATH followed by a
  slash) is rewritten relative to PATH; all other entries are passed
  through unchanged.
  """
  # The original spelled this as three manual checks (length, prefix,
  # slash) and shadowed the builtin name `file`; str.startswith on the
  # slash-terminated prefix is equivalent and clearer.
  prefix = path + '/'
  result = [ ]
  for fname in files:
    if fname.startswith(prefix):
      result.append(fname[len(prefix):])
    else:
      result.append(fname)
  return result
---|
811 | |
---|
def _collect_paths(pats, path=None):
  """Find files matching a space separated list of globs

  pats (string) is the list of glob patterns

  path (string), if specified, is a path that will be prepended to each
  glob pattern before it is evaluated

  If path is none the return value is a list of filenames, otherwise
  the return value is a list of 2-tuples.  The first element in each tuple
  is a matching filename and the second element is the portion of the
  glob pattern which matched the file before its last forward slash (/)
  """
  collected = [ ]
  for base_pat in pats.split():
    pattern = build_path_join(path, base_pat) if path else base_pat
    # If nothing matches, fall back to the (unexpanded) pattern itself.
    matches = glob.glob(native_path(pattern)) or [pattern]

    if path is None:
      # just record the matched names
      collected.extend(build_path(m) for m in matches)
    else:
      # with a path, also record where each source lives relative to it
      reldir = build_path_dirname(base_pat)
      collected.extend((build_path(m), reldir) for m in matches)

  return collected
---|
845 | |
---|
# Matches a build-path of a public Subversion API header, capturing the
# header's basename (without the ".h" extension).
_re_public_include = re.compile(r'^subversion/include/(\w+)\.h$')

def _is_public_include(fname):
  """Return a true (match) value if native path FNAME is a public header."""
  return _re_public_include.match(build_path(fname))
---|
849 | |
---|
def _swig_include_wrapper(fname):
  """Map public header FNAME to the native path of its SWIG proxy file."""
  proxy = _re_public_include.sub(
    r"subversion/bindings/swig/proxy/\1_h.swg", build_path(fname))
  return native_path(proxy)
---|
853 | |
---|
854 | def _path_endswith(path, subpath): |
---|
855 | """Check if SUBPATH is a true path suffix of PATH. |
---|
856 | """ |
---|
857 | path_len = len(path) |
---|
858 | subpath_len = len(subpath) |
---|
859 | |
---|
860 | return (subpath_len > 0 and path_len >= subpath_len |
---|
861 | and path[-subpath_len:] == subpath |
---|
862 | and (path_len == subpath_len |
---|
863 | or (subpath[0] == os.sep and path[-subpath_len] == os.sep) |
---|
864 | or path[-subpath_len - 1] == os.sep)) |
---|
865 | |
---|
class IncludeDependencyInfo:
  """Finds all dependencies between a named set of headers, and computes
  closure, so that individual C and SWIG source files can then be scanned, and
  the stored dependency data used to return all directly and indirectly
  referenced headers.

  Note that where SWIG is concerned, there are two different kinds of include:
  (1) those that include files in SWIG processing, and so matter to the
      generation of .c files. (These are %include, %import).
  (2) those that include references to C headers in the generated output,
      and so are not required at .c generation, only at .o generation.
      (These are %{ #include ... %}).

  This class works exclusively in native-style paths."""

  def __init__(self, filenames, fnames_nonexist):
    """Operation of an IncludeDependencyInfo instance is restricted to a
    'domain' - a set of header files which are considered interesting when
    following and reporting dependencies. This is done to avoid creating any
    dependencies on system header files. The domain is defined by three
    factors:
    (1) FILENAMES is a list of headers which are in the domain, and should be
        scanned to discover how they inter-relate.
    (2) FNAMES_NONEXIST is a list of headers which are in the domain, but will
        be created by the build process, and so are not available to be
        scanned - they will be assumed not to depend on any other interesting
        headers.
    (3) Files in subversion/bindings/swig/proxy/, which are autogenerated
        based on files in subversion/include/, will be added to the domain
        when a file in subversion/include/ is processed, and dependencies
        will be deduced by special-case logic.
    """

    # This defines the domain (i.e. set of files) in which dependencies are
    # being located. Its structure is:
    # { 'basename.h': [ 'path/to/something/named/basename.h',
    #                   'path/to/another/named/basename.h', ] }
    self._domain = {}
    for fname in filenames + fnames_nonexist:
      bname = os.path.basename(fname)
      self._domain.setdefault(bname, []).append(fname)
      # Public API headers get an autogenerated SWIG proxy file; register
      # that proxy in the domain as well (special case (3) above).
      if _is_public_include(fname):
        swig_fname = _swig_include_wrapper(fname)
        swig_bname = os.path.basename(swig_fname)
        self._domain.setdefault(swig_bname, []).append(swig_fname)

    # This data structure is:
    # { 'full/path/to/header.h': { 'full/path/to/dependency.h': TYPECODE, } }
    # TYPECODE is '#', denoting a C include, or '%' denoting a SWIG include.
    self._deps = {}
    for fname in filenames:
      self._deps[fname] = self._scan_for_includes(fname)
      if _is_public_include(fname):
        # The generated proxy depends (SWIG-wise) on the common .swg
        # support files, on the wrapped header itself, and on the proxies
        # of every public header that header includes.
        hdrs = { self._domain["proxy.swg"][0]: '%',
                 self._domain["apr.swg"][0]: '%',
                 fname: '%' }
        for h in self._deps[fname].keys():
          if _is_public_include(h):
            hdrs[_swig_include_wrapper(h)] = '%'
          else:
            raise RuntimeError("Public include '%s' depends on '%s', " \
                "which is not a public include! What's going on?" % (fname, h))
        swig_fname = _swig_include_wrapper(fname)
        swig_bname = os.path.basename(swig_fname)
        self._deps[swig_fname] = hdrs
    # Not-yet-generated headers are assumed to have no interesting deps.
    for fname in fnames_nonexist:
      self._deps[fname] = {}

    # Keep recomputing closures until we see no more changes
    while 1:
      changes = 0
      for fname in self._deps.keys():
        changes = self._include_closure(self._deps[fname]) or changes
      if not changes:
        break

  def query_swig(self, fname):
    """Scan the C or SWIG file FNAME, and return the full paths of each
    include file that is a direct or indirect dependency, as a 2-tuple:
    (C_INCLUDES, SWIG_INCLUDES)."""
    if fname in self._deps:
      hdrs = self._deps[fname]
    else:
      # FNAME is outside the precomputed domain: scan it now and close
      # over the stored header-to-header dependency data.
      hdrs = self._scan_for_includes(fname)
      self._include_closure(hdrs)
    c_filenames = []
    swig_filenames = []
    for hdr, hdr_type in hdrs.items():
      if hdr_type == '#':
        c_filenames.append(hdr)
      else: # hdr_type == '%'
        swig_filenames.append(hdr)
    # Be independent of hash ordering
    c_filenames.sort()
    swig_filenames.sort()
    return (c_filenames, swig_filenames)

  def query(self, fname):
    """Same as SELF.QUERY_SWIG(FNAME), but assert that there are no SWIG
    includes, and return only C includes as a single list."""
    c_includes, swig_includes = self.query_swig(fname)
    assert len(swig_includes) == 0
    return c_includes

  def _include_closure(self, hdrs):
    """Mutate the passed dictionary HDRS, by performing a single pass
    through the listed headers, adding the headers on which the first group
    of headers depend, if not already present.

    HDRS is of the form { 'path/to/header.h': TYPECODE, }

    Return a boolean indicating whether any changes were made."""
    # Snapshot the items first: we must not iterate HDRS while growing it.
    items = list(hdrs.items())
    for this_hdr, this_type in items:
      for dependency_hdr, dependency_type in self._deps[this_hdr].items():
        self._upd_dep_hash(hdrs, dependency_hdr, dependency_type)
    return (len(items) != len(hdrs))

  def _upd_dep_hash(self, hash, hdr, type):
    """Mutate HASH (a data structure of the form
    { 'path/to/header.h': TYPECODE, } ) to include additional info of a
    dependency of type TYPE on the file HDR."""
    # '%' (SWIG, .c: .i) has precedence over '#' (C, .o: .c)
    if hash.get(hdr) != '%':
      hash[hdr] = type

  # Group 1: '#' (C) or '%' (SWIG); group 2: the opening '<' or '"', if
  # any; group 3: the included file name.
  _re_include = \
      re.compile(r'^\s*([#%])\s*(?:include|import)\s*([<"])?([^<">;\s]+)')
  def _scan_for_includes(self, fname):
    """Scan C source file FNAME and return the basenames of any headers
    which are directly included, and within the set defined when this
    IncludeDependencyProcessor was initialized.

    Return a dictionary with included full file names as keys and the
    TYPECODE ('#' or '%') of each include as values."""
    hdrs = { }
    for line in fileinput.input(fname):
      match = self._re_include.match(line)
      if not match:
        continue
      include_param = native_path(match.group(3))
      type_code = match.group(1)
      # First candidate: the include parameter resolved relative to the
      # directory of the including file.
      direct_possibility_fname = os.path.normpath(os.path.join(
        os.path.dirname(fname), include_param))
      domain_fnames = self._domain.get(os.path.basename(include_param), [])
      if direct_possibility_fname in domain_fnames:
        self._upd_dep_hash(hdrs, direct_possibility_fname, type_code)
      elif (len(domain_fnames) == 1
            and (include_param.find(os.sep) == -1
                 or _path_endswith(domain_fnames[0], include_param))):
        # Unambiguous within the domain, and consistent with any
        # directory part of the include parameter.
        self._upd_dep_hash(hdrs, domain_fnames[0], type_code)
      else:
        # None found
        if include_param.find(os.sep) == -1 and len(domain_fnames) > 1:
          _error(
              "Unable to determine which file is being included\n"
              "  Include Parameter: '%s'\n"
              "  Including File: '%s'\n"
              "  Direct possibility: '%s'\n"
              "  Other possibilities: %s\n"
              % (include_param, fname, direct_possibility_fname,
                 domain_fnames))
        if match.group(2) == '"':
          _warning('"%s" header not found, file %s' % (include_param, fname))
          continue
        if match.group(2) == '<':
          _warning('<%s> header *found*, file %s' % (include_param, fname))
        # The above warnings help to avoid the following problems:
        # - If a header uses the correct <> or "" convention, then the
        #   warnings reveal if the build generator does/does not make
        #   dependencies for it when it should not/should - e.g. might
        #   reveal changes needed to build.conf.
        # ...and...
        # - If the generator is correct, then the warnings reveal incorrect
        #   use of the <>/"" convention.
    return hdrs
---|
1042 | |
---|
1043 | |
---|
def _sorted_files(graph, area):
  """Given a list of targets, sort them based on their dependencies.

  Return the filenames of the targets installed into AREA, ordered so that
  each target's file appears after the files of every target it depends on
  (via DT_LINK or DT_NONLIB edges).  Raise CircularDependencies if no such
  ordering exists."""

  # we're going to just go with a naive algorithm here. these lists are
  # going to be so short, that we can use O(n^2) or whatever this is.

  inst_targets = graph.get_sources(DT_INSTALL, area)

  # first we need our own copy of the target list since we're going to
  # munge it.
  targets = inst_targets[:]

  # the output list of the targets' files
  files = [ ]

  # loop while we have targets remaining:
  while targets:
    # find a target that has no dependencies in our current targets list.
    for t in targets:
      s = graph.get_sources(DT_LINK, t.name, Target) \
          + graph.get_sources(DT_NONLIB, t.name, Target)
      for d in s:
        if d in targets:
          break
      else:
        # no dependencies found in the targets list. this is a good "base"
        # to add to the files list now.
        if isinstance(t, TargetJava):
          # Java targets have no filename, and we just ignore them.
          pass
        elif isinstance(t, TargetI18N):
          # I18N targets have no filename, we recurse one level deeper, and
          # get the filenames of their dependencies.
          s = graph.get_sources(DT_LINK, t.name)
          for d in s:
            if d not in targets:
              files.append(d.filename)
        else:
          files.append(t.filename)

        # don't consider this target any more
        targets.remove(t)

        # break out of search through targets
        break
    else:
      # we went through the entire target list and everything had at least
      # one dependency on another target. thus, we have a circular dependency
      # tree. somebody messed up the .conf file, or the app truly does have
      # a loop (and if so, they're screwed; libtool can't relink a lib at
      # install time if the dependent libs haven't been installed yet)
      raise CircularDependencies()

  return files
---|
1098 | |
---|
class CircularDependencies(Exception):
  """Raised when the targets' dependency graph contains a cycle."""
---|
1101 | |
---|
def unique(seq):
  """Return the elements of SEQ with duplicates removed, preserving the
  order of first appearance."""
  # The original accumulated into a local named `list`, shadowing the
  # builtin; use descriptive names instead. A dict tracks what has been
  # seen (hashable elements assumed, as before).
  result = [ ]
  seen = { }
  for item in seq:
    if item not in seen:
      seen[item] = None
      result.append(item)
  return result
---|
1111 | |
---|
1112 | ### End of file. |
---|