
Source Code for Package SCons.Node

   1  """SCons.Node 
   2   
   3  The Node package for the SCons software construction utility. 
   4   
   5  This is, in many ways, the heart of SCons. 
   6   
   7  A Node is where we encapsulate all of the dependency information about 
   8  any thing that SCons can build, or about any thing which SCons can use 
   9  to build some other thing.  The canonical "thing," of course, is a file, 
  10  but a Node can also represent something remote (like a web page) or 
  11  something completely abstract (like an Alias). 
  12   
  13  Each specific type of "thing" is specifically represented by a subclass 
  14  of the Node base class:  Node.FS.File for files, Node.Alias for aliases, 
  15  etc.  Dependency information is kept here in the base class, and 
  16  information specific to files/aliases/etc. is in the subclass.  The 
  17  goal, if we've done this correctly, is that any type of "thing" should 
  18  be able to depend on any other type of "thing." 
  19   
  20  """ 
  21   
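
# Illustrative sketch (not part of the module source): at the SConscript
# level, "any thing can depend on any other thing" means, for example, that
# a File target can be made to depend on an abstract Alias.  The names below
# are hypothetical.
#
#     env = Environment()
#     docs = env.Alias('docs', [])                     # an Alias Node
#     tarball = env.Command('release.tar.gz', [], 'tar czf $TARGET .')
#     env.Depends(tarball, docs)                       # File depends on Alias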
  22  from __future__ import print_function 
  23   
  24  # 
  25  # Copyright (c) 2001 - 2017 The SCons Foundation 
  26  # 
  27  # Permission is hereby granted, free of charge, to any person obtaining 
  28  # a copy of this software and associated documentation files (the 
  29  # "Software"), to deal in the Software without restriction, including 
  30  # without limitation the rights to use, copy, modify, merge, publish, 
  31  # distribute, sublicense, and/or sell copies of the Software, and to 
  32  # permit persons to whom the Software is furnished to do so, subject to 
  33  # the following conditions: 
  34  # 
  35  # The above copyright notice and this permission notice shall be included 
  36  # in all copies or substantial portions of the Software. 
  37  # 
  38  # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY 
  39  # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 
  40  # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
  41  # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 
  42  # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 
  43  # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 
  44  # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
  45   
  46  __revision__ = "src/engine/SCons/Node/__init__.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog" 
  47   
  48  import collections 
  49  import copy 
  50  from itertools import chain 
  51   
  52  import SCons.Debug 
  53  from SCons.Debug import logInstanceCreation 
  54  import SCons.Executor 
  55  import SCons.Memoize 
  56  import SCons.Util 
  57   
  58  from SCons.Debug import Trace 
  59   
  60  from SCons.compat import with_metaclass, NoSlotsPyPy 
  61   
  62  print_duplicate = 0 
  63  
  64  def classname(obj):
  65      return str(obj.__class__).split('.')[-1]
  66  
  67  # Set to false if we're doing a dry run. There's more than one of these
  68  # little treats
  69  do_store_info = True
  70  
  71  # Node states
  72  #
  73  # These are in "priority" order, so that the maximum value for any
  74  # child/dependency of a node represents the state of that node if
  75  # it has no builder of its own. The canonical example is a file
  76  # system directory, which is only up to date if all of its children
  77  # were up to date.
  78  no_state = 0
  79  pending = 1
  80  executing = 2
  81  up_to_date = 3
  82  executed = 4
  83  failed = 5
  84  
  85  StateString = {
  86      0 : "no_state",
  87      1 : "pending",
  88      2 : "executing",
  89      3 : "up_to_date",
  90      4 : "executed",
  91      5 : "failed",
  92  }
  93  
  94  # controls whether implicit dependencies are cached:
  95  implicit_cache = 0
  96  
  97  # controls whether implicit dep changes are ignored:
  98  implicit_deps_unchanged = 0
  99  
 100  # controls whether the cached implicit deps are ignored:
 101  implicit_deps_changed = 0
 102  
 103  # A variable that can be set to an interface-specific function to be called
 104  # to annotate a Node with information about its creation.
 105  def do_nothing(node): pass
 106  
 107  Annotate = do_nothing
 108  
 109  # Gets set to 'True' if we're running in interactive mode. Is
 110  # currently used to release parts of a target's info during
 111  # clean builds and update runs (see release_target_info).
 112  interactive = False
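
# Illustrative sketch (not part of the module source): because the state
# values above are in priority order, a parent with no builder of its own can
# take the maximum of its children's states, for example:
#
#     >>> StateString[max(up_to_date, executed)]
#     'executed'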
113 114 -def is_derived_none(node):
115 raise NotImplementedError
116
117 -def is_derived_node(node):
118 """ 119 Returns true if this node is derived (i.e. built). 120 """ 121 return node.has_builder() or node.side_effect
122 123 _is_derived_map = {0 : is_derived_none, 124 1 : is_derived_node}
125 126 -def exists_none(node):
127 raise NotImplementedError
128
129 -def exists_always(node):
130 return 1
131
132 -def exists_base(node):
133 return node.stat() is not None
134
 135  def exists_entry(node):
 136      """Return whether the Entry exists.  Check the file system to see
 137      what we should turn into first.  Assume a file if there's no
 138      directory."""
 139      node.disambiguate()
 140      return _exists_map[node._func_exists](node)
 141  
 142  def exists_file(node):
 143      # Duplicate from source path if we are set up to do this.
 144      if node.duplicate and not node.is_derived() and not node.linked:
 145          src = node.srcnode()
 146          if src is not node:
 147              # At this point, src is meant to be copied in a variant directory.
 148              src = src.rfile()
 149              if src.get_abspath() != node.get_abspath():
 150                  if src.exists():
 151                      node.do_duplicate(src)
 152                      # Can't return 1 here because the duplication might
 153                      # not actually occur if the -n option is being used.
 154                  else:
 155                      # The source file does not exist.  Make sure no old
 156                      # copy remains in the variant directory.
 157                      if print_duplicate:
 158                          print("dup: no src for %s, unlinking old variant copy" % node)
 159                      if exists_base(node) or node.islink():
 160                          node.fs.unlink(node.get_internal_path())
 161                      # Return None explicitly because the Base.exists() call
 162                      # above will have cached its value if the file existed.
 163                      return None
 164      return exists_base(node)
 165  
 166  _exists_map = {0 : exists_none,
 167                 1 : exists_always,
 168                 2 : exists_base,
 169                 3 : exists_entry,
 170                 4 : exists_file}
171 172 173 -def rexists_none(node):
174 raise NotImplementedError
175
176 -def rexists_node(node):
177 return node.exists()
178
179 -def rexists_base(node):
180 return node.rfile().exists()
181 182 _rexists_map = {0 : rexists_none, 183 1 : rexists_node, 184 2 : rexists_base}
185 186 -def get_contents_none(node):
187 raise NotImplementedError
188
189 -def get_contents_entry(node):
190 """Fetch the contents of the entry. Returns the exact binary 191 contents of the file.""" 192 try: 193 node = node.disambiguate(must_exist=1) 194 except SCons.Errors.UserError: 195 # There was nothing on disk with which to disambiguate 196 # this entry. Leave it as an Entry, but return a null 197 # string so calls to get_contents() in emitters and the 198 # like (e.g. in qt.py) don't have to disambiguate by hand 199 # or catch the exception. 200 return '' 201 else: 202 return _get_contents_map[node._func_get_contents](node)
203
204 -def get_contents_dir(node):
205 """Return content signatures and names of all our children 206 separated by new-lines. Ensure that the nodes are sorted.""" 207 contents = [] 208 for n in sorted(node.children(), key=lambda t: t.name): 209 contents.append('%s %s\n' % (n.get_csig(), n.name)) 210 return ''.join(contents)
211
212 -def get_contents_file(node):
213 if not node.rexists(): 214 return b'' 215 fname = node.rfile().get_abspath() 216 try: 217 with open(fname, "rb") as fp: 218 contents = fp.read() 219 except EnvironmentError as e: 220 if not e.filename: 221 e.filename = fname 222 raise 223 return contents
224 225 _get_contents_map = {0 : get_contents_none, 226 1 : get_contents_entry, 227 2 : get_contents_dir, 228 3 : get_contents_file}
229 230 -def target_from_source_none(node, prefix, suffix, splitext):
231 raise NotImplementedError
232
233 -def target_from_source_base(node, prefix, suffix, splitext):
234 return node.dir.Entry(prefix + splitext(node.name)[0] + suffix)
235 236 _target_from_source_map = {0 : target_from_source_none, 237 1 : target_from_source_base}
 238  
 239  #
 240  # The new decider subsystem for Nodes
 241  #
 242  # We used to set and overwrite the changed_since_last_build function
 243  # directly on each Node, but to be able to use slots (less memory!) we
 244  # now keep a dictionary of the different decider functions.  Each Node
 245  # subclass then simply stores the index of the decider that it
 246  # should use.
 247  #
 248  
 249  #
 250  # First, the single decider functions
 251  #
 252  def changed_since_last_build_node(node, target, prev_ni):
 253      """
 254  
 255      Must be overridden in a specific subclass to return True if this
 256      Node (a dependency) has changed since the last time it was used
 257      to build the specified target. prev_ni is this Node's state (for
 258      example, its file timestamp, length, maybe content signature)
 259      as of the last time the target was built.
 260  
 261      Note that this method is called through the dependency, not the
 262      target, because a dependency Node must be able to use its own
 263      logic to decide if it changed. For example, File Nodes need to
 264      obey if we're configured to use timestamps, but Python Value Nodes
 265      never use timestamps and always use the content. If this method
 266      were called through the target, then each Node's implementation
 267      of this method would have to have more complicated logic to
 268      handle all the different Node types on which it might depend.
 269      """
 270      raise NotImplementedError
271
272 -def changed_since_last_build_alias(node, target, prev_ni):
273 cur_csig = node.get_csig() 274 try: 275 return cur_csig != prev_ni.csig 276 except AttributeError: 277 return 1
278
279 -def changed_since_last_build_entry(node, target, prev_ni):
280 node.disambiguate() 281 return _decider_map[node.changed_since_last_build](node, target, prev_ni)
282
283 -def changed_since_last_build_state_changed(node, target, prev_ni):
284 return (node.state != SCons.Node.up_to_date)
285
286 -def decide_source(node, target, prev_ni):
287 return target.get_build_env().decide_source(node, target, prev_ni)
288
289 -def decide_target(node, target, prev_ni):
290 return target.get_build_env().decide_target(node, target, prev_ni)
291
292 -def changed_since_last_build_python(node, target, prev_ni):
293 cur_csig = node.get_csig() 294 try: 295 return cur_csig != prev_ni.csig 296 except AttributeError: 297 return 1
 298  
 299  
 300  #
 301  # Now, the mapping from indices to decider functions
 302  #
 303  _decider_map = {0 : changed_since_last_build_node,
 304                  1 : changed_since_last_build_alias,
 305                  2 : changed_since_last_build_entry,
 306                  3 : changed_since_last_build_state_changed,
 307                  4 : decide_source,
 308                  5 : decide_target,
 309                  6 : changed_since_last_build_python}
 310  
 311  do_store_info = True
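
# Illustrative sketch (not part of the module source): a custom decider uses
# the same (node, target, prev_ni) signature as the entries above and can be
# attached with Node.Decider() (defined further down), which stores only a
# small integer index in the Node's slots.  'some_node' is hypothetical.
#
#     def csig_decider(node, target, prev_ni):
#         return node.get_csig() != getattr(prev_ni, 'csig', None)
#
#     some_node.Decider(csig_decider)
#     assert _decider_map[some_node.changed_since_last_build] is csig_decider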
312 313 # 314 # The new store_info subsystem for Nodes 315 # 316 # We would set and overwrite the store_info function 317 # before, but for being able to use slots (less memory!) we now have 318 # a dictionary of the different functions. Then in the Node 319 # subclasses we simply store the index to the info method that should be 320 # used by it. 321 # 322 323 # 324 # First, the single info functions 325 # 326 327 -def store_info_pass(node):
328 pass
329
330 -def store_info_file(node):
331 # Merge our build information into the already-stored entry. 332 # This accommodates "chained builds" where a file that's a target 333 # in one build (SConstruct file) is a source in a different build. 334 # See test/chained-build.py for the use case. 335 if do_store_info: 336 node.dir.sconsign().store_info(node.name, node)
337 338 339 store_info_map = {0 : store_info_pass, 340 1 : store_info_file}
341 342 # Classes for signature info for Nodes. 343 344 -class NodeInfoBase(object):
345 """ 346 The generic base class for signature information for a Node. 347 348 Node subclasses should subclass NodeInfoBase to provide their own 349 logic for dealing with their own Node-specific signature information. 350 """ 351 __slots__ = ('__weakref__',) 352 current_version_id = 2 353
354 - def update(self, node):
355 try: 356 field_list = self.field_list 357 except AttributeError: 358 return 359 for f in field_list: 360 try: 361 delattr(self, f) 362 except AttributeError: 363 pass 364 try: 365 func = getattr(node, 'get_' + f) 366 except AttributeError: 367 pass 368 else: 369 setattr(self, f, func())
370
371 - def convert(self, node, val):
372 pass
373
374 - def merge(self, other):
375 """ 376 Merge the fields of another object into this object. Already existing 377 information is overwritten by the other instance's data. 378 WARNING: If a '__dict__' slot is added, it should be updated instead of 379 replaced. 380 """ 381 state = other.__getstate__() 382 self.__setstate__(state)
383 - def format(self, field_list=None, names=0):
384 if field_list is None: 385 try: 386 field_list = self.field_list 387 except AttributeError: 388 field_list = list(getattr(self, '__dict__', {}).keys()) 389 for obj in type(self).mro(): 390 for slot in getattr(obj, '__slots__', ()): 391 if slot not in ('__weakref__', '__dict__'): 392 field_list.append(slot) 393 field_list.sort() 394 fields = [] 395 for field in field_list: 396 try: 397 f = getattr(self, field) 398 except AttributeError: 399 f = None 400 f = str(f) 401 if names: 402 f = field + ': ' + f 403 fields.append(f) 404 return fields
405
406 - def __getstate__(self):
407 """ 408 Return all fields that shall be pickled. Walk the slots in the class 409 hierarchy and add those to the state dictionary. If a '__dict__' slot is 410 available, copy all entries to the dictionary. Also include the version 411 id, which is fixed for all instances of a class. 412 """ 413 state = getattr(self, '__dict__', {}).copy() 414 for obj in type(self).mro(): 415 for name in getattr(obj,'__slots__',()): 416 if hasattr(self, name): 417 state[name] = getattr(self, name) 418 419 state['_version_id'] = self.current_version_id 420 try: 421 del state['__weakref__'] 422 except KeyError: 423 pass 424 return state
425
426 - def __setstate__(self, state):
427 """ 428 Restore the attributes from a pickled state. The version is discarded. 429 """ 430 # TODO check or discard version 431 del state['_version_id'] 432 433 for key, value in state.items(): 434 if key not in ('__weakref__',): 435 setattr(self, key, value)
436
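
# Illustrative sketch (not part of the module source): __getstate__ and
# __setstate__ above walk the __slots__ of the whole class hierarchy, so an
# instance round-trips through pickling without needing a __dict__, and the
# pickled state always carries the class's version id:
#
#     ni = NodeInfoBase()
#     state = ni.__getstate__()
#     assert state['_version_id'] == NodeInfoBase.current_version_id   # == 2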
437 438 -class BuildInfoBase(object):
439 """ 440 The generic base class for build information for a Node. 441 442 This is what gets stored in a .sconsign file for each target file. 443 It contains a NodeInfo instance for this node (signature information 444 that's specific to the type of Node) and direct attributes for the 445 generic build stuff we have to track: sources, explicit dependencies, 446 implicit dependencies, and action information. 447 """ 448 __slots__ = ("bsourcesigs", "bdependsigs", "bimplicitsigs", "bactsig", 449 "bsources", "bdepends", "bact", "bimplicit", "__weakref__") 450 current_version_id = 2 451
452 - def __init__(self):
453 # Create an object attribute from the class attribute so it ends up 454 # in the pickled data in the .sconsign file. 455 self.bsourcesigs = [] 456 self.bdependsigs = [] 457 self.bimplicitsigs = [] 458 self.bactsig = None
459
460 - def merge(self, other):
461 """ 462 Merge the fields of another object into this object. Already existing 463 information is overwritten by the other instance's data. 464 WARNING: If a '__dict__' slot is added, it should be updated instead of 465 replaced. 466 """ 467 state = other.__getstate__() 468 self.__setstate__(state)
469
470 - def __getstate__(self):
471 """ 472 Return all fields that shall be pickled. Walk the slots in the class 473 hierarchy and add those to the state dictionary. If a '__dict__' slot is 474 available, copy all entries to the dictionary. Also include the version 475 id, which is fixed for all instances of a class. 476 """ 477 state = getattr(self, '__dict__', {}).copy() 478 for obj in type(self).mro(): 479 for name in getattr(obj,'__slots__',()): 480 if hasattr(self, name): 481 state[name] = getattr(self, name) 482 483 state['_version_id'] = self.current_version_id 484 try: 485 del state['__weakref__'] 486 except KeyError: 487 pass 488 return state
489
490 - def __setstate__(self, state):
491 """ 492 Restore the attributes from a pickled state. 493 """ 494 # TODO check or discard version 495 del state['_version_id'] 496 for key, value in state.items(): 497 if key not in ('__weakref__',): 498 setattr(self, key, value)
499
500 501 -class Node(object, with_metaclass(NoSlotsPyPy)):
502 """The base Node class, for entities that we know how to 503 build, or use to build other Nodes. 504 """ 505 506 __slots__ = ['sources', 507 'sources_set', 508 '_specific_sources', 509 'depends', 510 'depends_set', 511 'ignore', 512 'ignore_set', 513 'prerequisites', 514 'implicit', 515 'waiting_parents', 516 'waiting_s_e', 517 'ref_count', 518 'wkids', 519 'env', 520 'state', 521 'precious', 522 'noclean', 523 'nocache', 524 'cached', 525 'always_build', 526 'includes', 527 'attributes', 528 'side_effect', 529 'side_effects', 530 'linked', 531 '_memo', 532 'executor', 533 'binfo', 534 'ninfo', 535 'builder', 536 'is_explicit', 537 'implicit_set', 538 'changed_since_last_build', 539 'store_info', 540 'pseudo', 541 '_tags', 542 '_func_is_derived', 543 '_func_exists', 544 '_func_rexists', 545 '_func_get_contents', 546 '_func_target_from_source'] 547
548 - class Attrs(object):
549 __slots__ = ('shared', '__dict__')
550 551
552 - def __init__(self):
553 if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.Node') 554 # Note that we no longer explicitly initialize a self.builder 555 # attribute to None here. That's because the self.builder 556 # attribute may be created on-the-fly later by a subclass (the 557 # canonical example being a builder to fetch a file from a 558 # source code system like CVS or Subversion). 559 560 # Each list of children that we maintain is accompanied by a 561 # dictionary used to look up quickly whether a node is already 562 # present in the list. Empirical tests showed that it was 563 # fastest to maintain them as side-by-side Node attributes in 564 # this way, instead of wrapping up each list+dictionary pair in 565 # a class. (Of course, we could always still do that in the 566 # future if we had a good reason to...). 567 self.sources = [] # source files used to build node 568 self.sources_set = set() 569 self._specific_sources = False 570 self.depends = [] # explicit dependencies (from Depends) 571 self.depends_set = set() 572 self.ignore = [] # dependencies to ignore 573 self.ignore_set = set() 574 self.prerequisites = None 575 self.implicit = None # implicit (scanned) dependencies (None means not scanned yet) 576 self.waiting_parents = set() 577 self.waiting_s_e = set() 578 self.ref_count = 0 579 self.wkids = None # Kids yet to walk, when it's an array 580 581 self.env = None 582 self.state = no_state 583 self.precious = None 584 self.pseudo = False 585 self.noclean = 0 586 self.nocache = 0 587 self.cached = 0 # is this node pulled from cache? 588 self.always_build = None 589 self.includes = None 590 self.attributes = self.Attrs() # Generic place to stick information about the Node. 591 self.side_effect = 0 # true iff this node is a side effect 592 self.side_effects = [] # the side effects of building this target 593 self.linked = 0 # is this node linked to the variant directory? 594 self.changed_since_last_build = 0 595 self.store_info = 0 596 self._tags = None 597 self._func_is_derived = 1 598 self._func_exists = 1 599 self._func_rexists = 1 600 self._func_get_contents = 0 601 self._func_target_from_source = 0 602 603 self.clear_memoized_values() 604 605 # Let the interface in which the build engine is embedded 606 # annotate this Node with its own info (like a description of 607 # what line in what file created the node, for example). 608 Annotate(self)
609
610 - def disambiguate(self, must_exist=None):
611 return self
612
613 - def get_suffix(self):
614 return ''
615 616 @SCons.Memoize.CountMethodCall
617 - def get_build_env(self):
618 """Fetch the appropriate Environment to build this node. 619 """ 620 try: 621 return self._memo['get_build_env'] 622 except KeyError: 623 pass 624 result = self.get_executor().get_build_env() 625 self._memo['get_build_env'] = result 626 return result
627
628 - def get_build_scanner_path(self, scanner):
629 """Fetch the appropriate scanner path for this node.""" 630 return self.get_executor().get_build_scanner_path(scanner)
631
632 - def set_executor(self, executor):
633 """Set the action executor for this node.""" 634 self.executor = executor
635
636 - def get_executor(self, create=1):
637 """Fetch the action executor for this node. Create one if 638 there isn't already one, and requested to do so.""" 639 try: 640 executor = self.executor 641 except AttributeError: 642 if not create: 643 raise 644 try: 645 act = self.builder.action 646 except AttributeError: 647 executor = SCons.Executor.Null(targets=[self]) 648 else: 649 executor = SCons.Executor.Executor(act, 650 self.env or self.builder.env, 651 [self.builder.overrides], 652 [self], 653 self.sources) 654 self.executor = executor 655 return executor
656
657 - def executor_cleanup(self):
658 """Let the executor clean up any cached information.""" 659 try: 660 executor = self.get_executor(create=None) 661 except AttributeError: 662 pass 663 else: 664 if executor is not None: 665 executor.cleanup()
666
667 - def reset_executor(self):
668 "Remove cached executor; forces recompute when needed." 669 try: 670 delattr(self, 'executor') 671 except AttributeError: 672 pass
673
674 - def push_to_cache(self):
675 """Try to push a node into a cache 676 """ 677 pass
678
679 - def retrieve_from_cache(self):
680 """Try to retrieve the node's content from a cache 681 682 This method is called from multiple threads in a parallel build, 683 so only do thread safe stuff here. Do thread unsafe stuff in 684 built(). 685 686 Returns true if the node was successfully retrieved. 687 """ 688 return 0
689 690 # 691 # Taskmaster interface subsystem 692 # 693
694 - def make_ready(self):
695 """Get a Node ready for evaluation. 696 697 This is called before the Taskmaster decides if the Node is 698 up-to-date or not. Overriding this method allows for a Node 699 subclass to be disambiguated if necessary, or for an implicit 700 source builder to be attached. 701 """ 702 pass
703
 704      def prepare(self):
 705          """Prepare for this Node to be built.
 706  
 707          This is called after the Taskmaster has decided that the Node
 708          is out-of-date and must be rebuilt, but before actually calling
 709          the method to build the Node.
 710  
 711          This default implementation checks that explicit or implicit
 712          dependencies either exist or are derived, and initializes the
 713          BuildInfo structure that will hold the information about how
 714          this node is, uh, built.
 715  
 716          (The existence of source files is checked separately by the
 717          Executor, which aggregates checks for all of the targets built
 718          by a specific action.)
 719  
 720          Overriding this method allows a Node subclass to remove
 721          the underlying file from the file system.  Note that subclass
 722          methods should call this base class method to get the child
 723          check and the BuildInfo structure.
 724          """
 725          if self.depends is not None:
 726              for d in self.depends:
 727                  if d.missing():
 728                      msg = "Explicit dependency `%s' not found, needed by target `%s'."
 729                      raise SCons.Errors.StopError(msg % (d, self))
 730          if self.implicit is not None:
 731              for i in self.implicit:
 732                  if i.missing():
 733                      msg = "Implicit dependency `%s' not found, needed by target `%s'."
 734                      raise SCons.Errors.StopError(msg % (i, self))
 735          self.binfo = self.get_binfo()
736
737 - def build(self, **kw):
738 """Actually build the node. 739 740 This is called by the Taskmaster after it's decided that the 741 Node is out-of-date and must be rebuilt, and after the prepare() 742 method has gotten everything, uh, prepared. 743 744 This method is called from multiple threads in a parallel build, 745 so only do thread safe stuff here. Do thread unsafe stuff 746 in built(). 747 748 """ 749 try: 750 self.get_executor()(self, **kw) 751 except SCons.Errors.BuildError as e: 752 e.node = self 753 raise
754
755 - def built(self):
756 """Called just after this node is successfully built.""" 757 758 # Clear the implicit dependency caches of any Nodes 759 # waiting for this Node to be built. 760 for parent in self.waiting_parents: 761 parent.implicit = None 762 763 self.clear() 764 765 if self.pseudo: 766 if self.exists(): 767 raise SCons.Errors.UserError("Pseudo target " + str(self) + " must not exist") 768 else: 769 if not self.exists() and do_store_info: 770 SCons.Warnings.warn(SCons.Warnings.TargetNotBuiltWarning, 771 "Cannot find target " + str(self) + " after building") 772 self.ninfo.update(self)
773
774 - def visited(self):
775 """Called just after this node has been visited (with or 776 without a build).""" 777 try: 778 binfo = self.binfo 779 except AttributeError: 780 # Apparently this node doesn't need build info, so 781 # don't bother calculating or storing it. 782 pass 783 else: 784 self.ninfo.update(self) 785 SCons.Node.store_info_map[self.store_info](self)
786
787 - def release_target_info(self):
788 """Called just after this node has been marked 789 up-to-date or was built completely. 790 791 This is where we try to release as many target node infos 792 as possible for clean builds and update runs, in order 793 to minimize the overall memory consumption. 794 795 By purging attributes that aren't needed any longer after 796 a Node (=File) got built, we don't have to care that much how 797 many KBytes a Node actually requires...as long as we free 798 the memory shortly afterwards. 799 800 @see: built() and File.release_target_info() 801 """ 802 pass
803 804 # 805 # 806 # 807
808 - def add_to_waiting_s_e(self, node):
809 self.waiting_s_e.add(node)
810
811 - def add_to_waiting_parents(self, node):
812 """ 813 Returns the number of nodes added to our waiting parents list: 814 1 if we add a unique waiting parent, 0 if not. (Note that the 815 returned values are intended to be used to increment a reference 816 count, so don't think you can "clean up" this function by using 817 True and False instead...) 818 """ 819 wp = self.waiting_parents 820 if node in wp: 821 return 0 822 wp.add(node) 823 return 1
824
825 - def postprocess(self):
826 """Clean up anything we don't need to hang onto after we've 827 been built.""" 828 self.executor_cleanup() 829 self.waiting_parents = set()
830
831 - def clear(self):
832 """Completely clear a Node of all its cached state (so that it 833 can be re-evaluated by interfaces that do continuous integration 834 builds). 835 """ 836 # The del_binfo() call here isn't necessary for normal execution, 837 # but is for interactive mode, where we might rebuild the same 838 # target and need to start from scratch. 839 self.del_binfo() 840 self.clear_memoized_values() 841 self.ninfo = self.new_ninfo() 842 self.executor_cleanup() 843 try: 844 delattr(self, '_calculated_sig') 845 except AttributeError: 846 pass 847 self.includes = None
848
849 - def clear_memoized_values(self):
850 self._memo = {}
851
852 - def builder_set(self, builder):
853 self.builder = builder 854 try: 855 del self.executor 856 except AttributeError: 857 pass
858
859 - def has_builder(self):
860 """Return whether this Node has a builder or not. 861 862 In Boolean tests, this turns out to be a *lot* more efficient 863 than simply examining the builder attribute directly ("if 864 node.builder: ..."). When the builder attribute is examined 865 directly, it ends up calling __getattr__ for both the __len__ 866 and __nonzero__ attributes on instances of our Builder Proxy 867 class(es), generating a bazillion extra calls and slowing 868 things down immensely. 869 """ 870 try: 871 b = self.builder 872 except AttributeError: 873 # There was no explicit builder for this Node, so initialize 874 # the self.builder attribute to None now. 875 b = self.builder = None 876 return b is not None
877
 878      def set_explicit(self, is_explicit):
 879          self.is_explicit = is_explicit
 880  
881 - def has_explicit_builder(self):
882 """Return whether this Node has an explicit builder 883 884 This allows an internal Builder created by SCons to be marked 885 non-explicit, so that it can be overridden by an explicit 886 builder that the user supplies (the canonical example being 887 directories).""" 888 try: 889 return self.is_explicit 890 except AttributeError: 891 self.is_explicit = None 892 return self.is_explicit
893
894 - def get_builder(self, default_builder=None):
895 """Return the set builder, or a specified default value""" 896 try: 897 return self.builder 898 except AttributeError: 899 return default_builder
900 901 multiple_side_effect_has_builder = has_builder 902
903 - def is_derived(self):
904 """ 905 Returns true if this node is derived (i.e. built). 906 907 This should return true only for nodes whose path should be in 908 the variant directory when duplicate=0 and should contribute their build 909 signatures when they are used as source files to other derived files. For 910 example: source with source builders are not derived in this sense, 911 and hence should not return true. 912 """ 913 return _is_derived_map[self._func_is_derived](self)
914
915 - def alter_targets(self):
916 """Return a list of alternate targets for this Node. 917 """ 918 return [], None
919
920 - def get_found_includes(self, env, scanner, path):
921 """Return the scanned include lines (implicit dependencies) 922 found in this node. 923 924 The default is no implicit dependencies. We expect this method 925 to be overridden by any subclass that can be scanned for 926 implicit dependencies. 927 """ 928 return []
929
930 - def get_implicit_deps(self, env, initial_scanner, path_func, kw = {}):
931 """Return a list of implicit dependencies for this node. 932 933 This method exists to handle recursive invocation of the scanner 934 on the implicit dependencies returned by the scanner, if the 935 scanner's recursive flag says that we should. 936 """ 937 nodes = [self] 938 seen = set(nodes) 939 dependencies = [] 940 path_memo = {} 941 942 root_node_scanner = self._get_scanner(env, initial_scanner, None, kw) 943 944 while nodes: 945 node = nodes.pop(0) 946 947 scanner = node._get_scanner(env, initial_scanner, root_node_scanner, kw) 948 if not scanner: 949 continue 950 951 try: 952 path = path_memo[scanner] 953 except KeyError: 954 path = path_func(scanner) 955 path_memo[scanner] = path 956 957 included_deps = [x for x in node.get_found_includes(env, scanner, path) if x not in seen] 958 if included_deps: 959 dependencies.extend(included_deps) 960 seen.update(included_deps) 961 nodes.extend(scanner.recurse_nodes(included_deps)) 962 963 return dependencies
964
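
# Illustrative sketch (not part of the module source): get_implicit_deps()
# above is a worklist traversal -- pop a node, scan it, record any includes
# not yet seen, and (if the scanner recurses) queue them for scanning in
# turn.  Stripped of the SCons-specific types it is roughly:
#
#     def collect_deps(root, scan):        # scan(node) -> iterable of deps
#         work, seen, deps = [root], {root}, []
#         while work:
#             node = work.pop(0)
#             new = [d for d in scan(node) if d not in seen]
#             deps.extend(new)
#             seen.update(new)
#             work.extend(new)             # the real code lets the scanner decide
#         return deps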
965 - def _get_scanner(self, env, initial_scanner, root_node_scanner, kw):
966 if initial_scanner: 967 # handle explicit scanner case 968 scanner = initial_scanner.select(self) 969 else: 970 # handle implicit scanner case 971 scanner = self.get_env_scanner(env, kw) 972 if scanner: 973 scanner = scanner.select(self) 974 975 if not scanner: 976 # no scanner could be found for the given node's scanner key; 977 # thus, make an attempt at using a default. 978 scanner = root_node_scanner 979 980 return scanner
981
982 - def get_env_scanner(self, env, kw={}):
983 return env.get_scanner(self.scanner_key())
984
985 - def get_target_scanner(self):
986 return self.builder.target_scanner
987
988 - def get_source_scanner(self, node):
989 """Fetch the source scanner for the specified node 990 991 NOTE: "self" is the target being built, "node" is 992 the source file for which we want to fetch the scanner. 993 994 Implies self.has_builder() is true; again, expect to only be 995 called from locations where this is already verified. 996 997 This function may be called very often; it attempts to cache 998 the scanner found to improve performance. 999 """ 1000 scanner = None 1001 try: 1002 scanner = self.builder.source_scanner 1003 except AttributeError: 1004 pass 1005 if not scanner: 1006 # The builder didn't have an explicit scanner, so go look up 1007 # a scanner from env['SCANNERS'] based on the node's scanner 1008 # key (usually the file extension). 1009 scanner = self.get_env_scanner(self.get_build_env()) 1010 if scanner: 1011 scanner = scanner.select(node) 1012 return scanner
1013
1014 - def add_to_implicit(self, deps):
1015 if not hasattr(self, 'implicit') or self.implicit is None: 1016 self.implicit = [] 1017 self.implicit_set = set() 1018 self._children_reset() 1019 self._add_child(self.implicit, self.implicit_set, deps)
1020
1021 - def scan(self):
1022 """Scan this node's dependents for implicit dependencies.""" 1023 # Don't bother scanning non-derived files, because we don't 1024 # care what their dependencies are. 1025 # Don't scan again, if we already have scanned. 1026 if self.implicit is not None: 1027 return 1028 self.implicit = [] 1029 self.implicit_set = set() 1030 self._children_reset() 1031 if not self.has_builder(): 1032 return 1033 1034 build_env = self.get_build_env() 1035 executor = self.get_executor() 1036 1037 # Here's where we implement --implicit-cache. 1038 if implicit_cache and not implicit_deps_changed: 1039 implicit = self.get_stored_implicit() 1040 if implicit is not None: 1041 # We now add the implicit dependencies returned from the 1042 # stored .sconsign entry to have already been converted 1043 # to Nodes for us. (We used to run them through a 1044 # source_factory function here.) 1045 1046 # Update all of the targets with them. This 1047 # essentially short-circuits an N*M scan of the 1048 # sources for each individual target, which is a hell 1049 # of a lot more efficient. 1050 for tgt in executor.get_all_targets(): 1051 tgt.add_to_implicit(implicit) 1052 1053 if implicit_deps_unchanged or self.is_up_to_date(): 1054 return 1055 # one of this node's sources has changed, 1056 # so we must recalculate the implicit deps for all targets 1057 for tgt in executor.get_all_targets(): 1058 tgt.implicit = [] 1059 tgt.implicit_set = set() 1060 1061 # Have the executor scan the sources. 1062 executor.scan_sources(self.builder.source_scanner) 1063 1064 # If there's a target scanner, have the executor scan the target 1065 # node itself and associated targets that might be built. 1066 scanner = self.get_target_scanner() 1067 if scanner: 1068 executor.scan_targets(scanner)
1069
1070 - def scanner_key(self):
1071 return None
1072
1073      def select_scanner(self, scanner):
1074          """Selects a scanner for this Node.
1075  
1076          This is a separate method so it can be overridden by Node
1077          subclasses (specifically, Node.FS.Dir) that *must* use their
1078          own Scanner and don't select one from the Scanner.Selector that's
1079          configured for the target.
1080          """
1081          return scanner.select(self)
1082
1083 - def env_set(self, env, safe=0):
1084 if safe and self.env: 1085 return 1086 self.env = env
1087 1088 # 1089 # SIGNATURE SUBSYSTEM 1090 # 1091 1092 NodeInfo = NodeInfoBase 1093 BuildInfo = BuildInfoBase 1094
1095 - def new_ninfo(self):
1096 ninfo = self.NodeInfo() 1097 return ninfo
1098
1099 - def get_ninfo(self):
1100 try: 1101 return self.ninfo 1102 except AttributeError: 1103 self.ninfo = self.new_ninfo() 1104 return self.ninfo
1105
1106 - def new_binfo(self):
1107 binfo = self.BuildInfo() 1108 return binfo
1109
1110 - def get_binfo(self):
1111 """ 1112 Fetch a node's build information. 1113 1114 node - the node whose sources will be collected 1115 cache - alternate node to use for the signature cache 1116 returns - the build signature 1117 1118 This no longer handles the recursive descent of the 1119 node's children's signatures. We expect that they're 1120 already built and updated by someone else, if that's 1121 what's wanted. 1122 """ 1123 try: 1124 return self.binfo 1125 except AttributeError: 1126 pass 1127 1128 binfo = self.new_binfo() 1129 self.binfo = binfo 1130 1131 executor = self.get_executor() 1132 ignore_set = self.ignore_set 1133 1134 if self.has_builder(): 1135 binfo.bact = str(executor) 1136 binfo.bactsig = SCons.Util.MD5signature(executor.get_contents()) 1137 1138 if self._specific_sources: 1139 sources = [ s for s in self.sources if not s in ignore_set] 1140 1141 else: 1142 sources = executor.get_unignored_sources(self, self.ignore) 1143 1144 seen = set() 1145 binfo.bsources = [s for s in sources if s not in seen and not seen.add(s)] 1146 binfo.bsourcesigs = [s.get_ninfo() for s in binfo.bsources] 1147 1148 1149 binfo.bdepends = self.depends 1150 binfo.bdependsigs = [d.get_ninfo() for d in self.depends if d not in ignore_set] 1151 1152 binfo.bimplicit = self.implicit or [] 1153 binfo.bimplicitsigs = [i.get_ninfo() for i in binfo.bimplicit if i not in ignore_set] 1154 1155 1156 return binfo
1157
1158 - def del_binfo(self):
1159 """Delete the build info from this node.""" 1160 try: 1161 delattr(self, 'binfo') 1162 except AttributeError: 1163 pass
1164
1165 - def get_csig(self):
1166 try: 1167 return self.ninfo.csig 1168 except AttributeError: 1169 ninfo = self.get_ninfo() 1170 ninfo.csig = SCons.Util.MD5signature(self.get_contents()) 1171 return self.ninfo.csig
1172
1173 - def get_cachedir_csig(self):
1174 return self.get_csig()
1175
1176 - def get_stored_info(self):
1177 return None
1178
1179 - def get_stored_implicit(self):
1180 """Fetch the stored implicit dependencies""" 1181 return None
1182 1183 # 1184 # 1185 # 1186
1187 - def set_precious(self, precious = 1):
1188 """Set the Node's precious value.""" 1189 self.precious = precious
1190
1191      def set_pseudo(self, pseudo = True):
1192          """Set the Node's pseudo value."""
1193          self.pseudo = pseudo
1194
1195 - def set_noclean(self, noclean = 1):
1196 """Set the Node's noclean value.""" 1197 # Make sure noclean is an integer so the --debug=stree 1198 # output in Util.py can use it as an index. 1199 self.noclean = noclean and 1 or 0
1200
1201 - def set_nocache(self, nocache = 1):
1202 """Set the Node's nocache value.""" 1203 # Make sure nocache is an integer so the --debug=stree 1204 # output in Util.py can use it as an index. 1205 self.nocache = nocache and 1 or 0
1206
1207 - def set_always_build(self, always_build = 1):
1208 """Set the Node's always_build value.""" 1209 self.always_build = always_build
1210
1211      def exists(self):
1212          """Does this node exist?"""
1213          return _exists_map[self._func_exists](self)
1214
1215      def rexists(self):
1216          """Does this node exist locally or in a repository?"""
1217          # There are no repositories by default:
1218          return _rexists_map[self._func_rexists](self)
1219
1220 - def get_contents(self):
1221 """Fetch the contents of the entry.""" 1222 return _get_contents_map[self._func_get_contents](self)
1223
1224 - def missing(self):
1225 return not self.is_derived() and \ 1226 not self.linked and \ 1227 not self.rexists()
1228
1229 - def remove(self):
1230 """Remove this Node: no-op by default.""" 1231 return None
1232
1233 - def add_dependency(self, depend):
1234 """Adds dependencies.""" 1235 try: 1236 self._add_child(self.depends, self.depends_set, depend) 1237 except TypeError as e: 1238 e = e.args[0] 1239 if SCons.Util.is_List(e): 1240 s = list(map(str, e)) 1241 else: 1242 s = str(e) 1243 raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
1244
1245 - def add_prerequisite(self, prerequisite):
1246 """Adds prerequisites""" 1247 if self.prerequisites is None: 1248 self.prerequisites = SCons.Util.UniqueList() 1249 self.prerequisites.extend(prerequisite) 1250 self._children_reset()
1251
1252 - def add_ignore(self, depend):
1253 """Adds dependencies to ignore.""" 1254 try: 1255 self._add_child(self.ignore, self.ignore_set, depend) 1256 except TypeError as e: 1257 e = e.args[0] 1258 if SCons.Util.is_List(e): 1259 s = list(map(str, e)) 1260 else: 1261 s = str(e) 1262 raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
1263
1264 - def add_source(self, source):
1265 """Adds sources.""" 1266 if self._specific_sources: 1267 return 1268 try: 1269 self._add_child(self.sources, self.sources_set, source) 1270 except TypeError as e: 1271 e = e.args[0] 1272 if SCons.Util.is_List(e): 1273 s = list(map(str, e)) 1274 else: 1275 s = str(e) 1276 raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
1277
1278 - def _add_child(self, collection, set, child):
1279 """Adds 'child' to 'collection', first checking 'set' to see if it's 1280 already present.""" 1281 added = None 1282 for c in child: 1283 if c not in set: 1284 set.add(c) 1285 collection.append(c) 1286 added = 1 1287 if added: 1288 self._children_reset()
1289
1290 - def set_specific_source(self, source):
1291 self.add_source(source) 1292 self._specific_sources = True
1293
1294 - def add_wkid(self, wkid):
1295 """Add a node to the list of kids waiting to be evaluated""" 1296 if self.wkids is not None: 1297 self.wkids.append(wkid)
1298
1299 - def _children_reset(self):
1300 self.clear_memoized_values() 1301 # We need to let the Executor clear out any calculated 1302 # build info that it's cached so we can re-calculate it. 1303 self.executor_cleanup()
1304 1305 @SCons.Memoize.CountMethodCall
1306 - def _children_get(self):
1307 try: 1308 return self._memo['_children_get'] 1309 except KeyError: 1310 pass 1311 1312 # The return list may contain duplicate Nodes, especially in 1313 # source trees where there are a lot of repeated #includes 1314 # of a tangle of .h files. Profiling shows, however, that 1315 # eliminating the duplicates with a brute-force approach that 1316 # preserves the order (that is, something like: 1317 # 1318 # u = [] 1319 # for n in list: 1320 # if n not in u: 1321 # u.append(n)" 1322 # 1323 # takes more cycles than just letting the underlying methods 1324 # hand back cached values if a Node's information is requested 1325 # multiple times. (Other methods of removing duplicates, like 1326 # using dictionary keys, lose the order, and the only ordered 1327 # dictionary patterns I found all ended up using "not in" 1328 # internally anyway...) 1329 if self.ignore_set: 1330 iter = chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f]) 1331 1332 children = [] 1333 for i in iter: 1334 if i not in self.ignore_set: 1335 children.append(i) 1336 else: 1337 children = self.all_children(scan=0) 1338 1339 self._memo['_children_get'] = children 1340 return children
1341
1342 - def all_children(self, scan=1):
1343 """Return a list of all the node's direct children.""" 1344 if scan: 1345 self.scan() 1346 1347 # The return list may contain duplicate Nodes, especially in 1348 # source trees where there are a lot of repeated #includes 1349 # of a tangle of .h files. Profiling shows, however, that 1350 # eliminating the duplicates with a brute-force approach that 1351 # preserves the order (that is, something like: 1352 # 1353 # u = [] 1354 # for n in list: 1355 # if n not in u: 1356 # u.append(n)" 1357 # 1358 # takes more cycles than just letting the underlying methods 1359 # hand back cached values if a Node's information is requested 1360 # multiple times. (Other methods of removing duplicates, like 1361 # using dictionary keys, lose the order, and the only ordered 1362 # dictionary patterns I found all ended up using "not in" 1363 # internally anyway...) 1364 return list(chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f]))
1365
1366 - def children(self, scan=1):
1367 """Return a list of the node's direct children, minus those 1368 that are ignored by this node.""" 1369 if scan: 1370 self.scan() 1371 return self._children_get()
1372
1373 - def set_state(self, state):
1374 self.state = state
1375
1376 - def get_state(self):
1377 return self.state
1378
1379 - def get_env(self):
1380 env = self.env 1381 if not env: 1382 import SCons.Defaults 1383 env = SCons.Defaults.DefaultEnvironment() 1384 return env
1385
1386 - def Decider(self, function):
1387 foundkey = None 1388 for k, v in _decider_map.items(): 1389 if v == function: 1390 foundkey = k 1391 break 1392 if not foundkey: 1393 foundkey = len(_decider_map) 1394 _decider_map[foundkey] = function 1395 self.changed_since_last_build = foundkey
1396
1397 - def Tag(self, key, value):
1398 """ Add a user-defined tag. """ 1399 if not self._tags: 1400 self._tags = {} 1401 self._tags[key] = value
1402
1403 - def GetTag(self, key):
1404 """ Return a user-defined tag. """ 1405 if not self._tags: 1406 return None 1407 return self._tags.get(key, None)
1408
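
# Illustrative sketch (not part of the module source): Tag()/GetTag() above
# give tools a place to hang arbitrary metadata on a Node without adding new
# slots.  'pkg_node' is a hypothetical Node.
#
#     pkg_node.Tag('INSTALL_LOCATION', '/usr/bin')
#     assert pkg_node.GetTag('INSTALL_LOCATION') == '/usr/bin'
#     assert pkg_node.GetTag('missing-key') is None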
1409 - def changed(self, node=None, allowcache=False):
1410 """ 1411 Returns if the node is up-to-date with respect to the BuildInfo 1412 stored last time it was built. The default behavior is to compare 1413 it against our own previously stored BuildInfo, but the stored 1414 BuildInfo from another Node (typically one in a Repository) 1415 can be used instead. 1416 1417 Note that we now *always* check every dependency. We used to 1418 short-circuit the check by returning as soon as we detected 1419 any difference, but we now rely on checking every dependency 1420 to make sure that any necessary Node information (for example, 1421 the content signature of an #included .h file) is updated. 1422 1423 The allowcache option was added for supporting the early 1424 release of the executor/builder structures, right after 1425 a File target was built. When set to true, the return 1426 value of this changed method gets cached for File nodes. 1427 Like this, the executor isn't needed any longer for subsequent 1428 calls to changed(). 1429 1430 @see: FS.File.changed(), FS.File.release_target_info() 1431 """ 1432 t = 0 1433 if t: Trace('changed(%s [%s], %s)' % (self, classname(self), node)) 1434 if node is None: 1435 node = self 1436 1437 result = False 1438 1439 bi = node.get_stored_info().binfo 1440 then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs 1441 children = self.children() 1442 1443 diff = len(children) - len(then) 1444 if diff: 1445 # The old and new dependency lists are different lengths. 1446 # This always indicates that the Node must be rebuilt. 1447 # We also extend the old dependency list with enough None 1448 # entries to equal the new dependency list, for the benefit 1449 # of the loop below that updates node information. 1450 then.extend([None] * diff) 1451 if t: Trace(': old %s new %s' % (len(then), len(children))) 1452 result = True 1453 1454 for child, prev_ni in zip(children, then): 1455 if _decider_map[child.changed_since_last_build](child, self, prev_ni): 1456 if t: Trace(': %s changed' % child) 1457 result = True 1458 1459 contents = self.get_executor().get_contents() 1460 if self.has_builder(): 1461 import SCons.Util 1462 newsig = SCons.Util.MD5signature(contents) 1463 if bi.bactsig != newsig: 1464 if t: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig)) 1465 result = True 1466 1467 if not result: 1468 if t: Trace(': up to date') 1469 1470 if t: Trace('\n') 1471 1472 return result
1473
1474 - def is_up_to_date(self):
1475 """Default check for whether the Node is current: unknown Node 1476 subtypes are always out of date, so they will always get built.""" 1477 return None
1478
1479 - def children_are_up_to_date(self):
1480 """Alternate check for whether the Node is current: If all of 1481 our children were up-to-date, then this Node was up-to-date, too. 1482 1483 The SCons.Node.Alias and SCons.Node.Python.Value subclasses 1484 rebind their current() method to this method.""" 1485 # Allow the children to calculate their signatures. 1486 self.binfo = self.get_binfo() 1487 if self.always_build: 1488 return None 1489 state = 0 1490 for kid in self.children(None): 1491 s = kid.get_state() 1492 if s and (not state or s > state): 1493 state = s 1494 return (state == 0 or state == SCons.Node.up_to_date)
1495
1496 - def is_literal(self):
1497 """Always pass the string representation of a Node to 1498 the command interpreter literally.""" 1499 return 1
1500
1501 - def render_include_tree(self):
1502 """ 1503 Return a text representation, suitable for displaying to the 1504 user, of the include tree for the sources of this node. 1505 """ 1506 if self.is_derived(): 1507 env = self.get_build_env() 1508 if env: 1509 for s in self.sources: 1510 scanner = self.get_source_scanner(s) 1511 if scanner: 1512 path = self.get_build_scanner_path(scanner) 1513 else: 1514 path = None 1515 def f(node, env=env, scanner=scanner, path=path): 1516 return node.get_found_includes(env, scanner, path)
1517 return SCons.Util.render_tree(s, f, 1) 1518 else: 1519 return None
1520
1521 - def get_abspath(self):
1522 """ 1523 Return an absolute path to the Node. This will return simply 1524 str(Node) by default, but for Node types that have a concept of 1525 relative path, this might return something different. 1526 """ 1527 return str(self)
1528
1529 - def for_signature(self):
1530 """ 1531 Return a string representation of the Node that will always 1532 be the same for this particular Node, no matter what. This 1533 is by contrast to the __str__() method, which might, for 1534 instance, return a relative path for a file Node. The purpose 1535 of this method is to generate a value to be used in signature 1536 calculation for the command line used to build a target, and 1537 we use this method instead of str() to avoid unnecessary 1538 rebuilds. This method does not need to return something that 1539 would actually work in a command line; it can return any kind of 1540 nonsense, so long as it does not change. 1541 """ 1542 return str(self)
1543
1544 - def get_string(self, for_signature):
1545 """This is a convenience function designed primarily to be 1546 used in command generators (i.e., CommandGeneratorActions or 1547 Environment variables that are callable), which are called 1548 with a for_signature argument that is nonzero if the command 1549 generator is being called to generate a signature for the 1550 command line, which determines if we should rebuild or not. 1551 1552 Such command generators should use this method in preference 1553 to str(Node) when converting a Node to a string, passing 1554 in the for_signature parameter, such that we will call 1555 Node.for_signature() or str(Node) properly, depending on whether 1556 we are calculating a signature or actually constructing a 1557 command line.""" 1558 if for_signature: 1559 return self.for_signature() 1560 return str(self)
1561
1562 - def get_subst_proxy(self):
1563 """ 1564 This method is expected to return an object that will function 1565 exactly like this Node, except that it implements any additional 1566 special features that we would like to be in effect for 1567 Environment variable substitution. The principle use is that 1568 some Nodes would like to implement a __getattr__() method, 1569 but putting that in the Node type itself has a tendency to kill 1570 performance. We instead put it in a proxy and return it from 1571 this method. It is legal for this method to return self 1572 if no new functionality is needed for Environment substitution. 1573 """ 1574 return self
1575
1576 - def explain(self):
1577 if not self.exists(): 1578 return "building `%s' because it doesn't exist\n" % self 1579 1580 if self.always_build: 1581 return "rebuilding `%s' because AlwaysBuild() is specified\n" % self 1582 1583 old = self.get_stored_info() 1584 if old is None: 1585 return None 1586 1587 old = old.binfo 1588 old.prepare_dependencies() 1589 1590 try: 1591 old_bkids = old.bsources + old.bdepends + old.bimplicit 1592 old_bkidsigs = old.bsourcesigs + old.bdependsigs + old.bimplicitsigs 1593 except AttributeError: 1594 return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self 1595 1596 new = self.get_binfo() 1597 1598 new_bkids = new.bsources + new.bdepends + new.bimplicit 1599 new_bkidsigs = new.bsourcesigs + new.bdependsigs + new.bimplicitsigs 1600 1601 osig = dict(list(zip(old_bkids, old_bkidsigs))) 1602 nsig = dict(list(zip(new_bkids, new_bkidsigs))) 1603 1604 # The sources and dependencies we'll want to report are all stored 1605 # as relative paths to this target's directory, but we want to 1606 # report them relative to the top-level SConstruct directory, 1607 # so we only print them after running them through this lambda 1608 # to turn them into the right relative Node and then return 1609 # its string. 1610 def stringify( s, E=self.dir.Entry ) : 1611 if hasattr( s, 'dir' ) : 1612 return str(E(s)) 1613 return str(s)
1614 1615 lines = [] 1616 1617 removed = [x for x in old_bkids if not x in new_bkids] 1618 if removed: 1619 removed = list(map(stringify, removed)) 1620 fmt = "`%s' is no longer a dependency\n" 1621 lines.extend([fmt % s for s in removed]) 1622 1623 for k in new_bkids: 1624 if not k in old_bkids: 1625 lines.append("`%s' is a new dependency\n" % stringify(k)) 1626 elif _decider_map[k.changed_since_last_build](k, self, osig[k]): 1627 lines.append("`%s' changed\n" % stringify(k)) 1628 1629 if len(lines) == 0 and old_bkids != new_bkids: 1630 lines.append("the dependency order changed:\n" + 1631 "%sold: %s\n" % (' '*15, list(map(stringify, old_bkids))) + 1632 "%snew: %s\n" % (' '*15, list(map(stringify, new_bkids)))) 1633 1634 if len(lines) == 0: 1635 def fmt_with_title(title, strlines): 1636 lines = strlines.split('\n') 1637 sep = '\n' + ' '*(15 + len(title)) 1638 return ' '*15 + title + sep.join(lines) + '\n' 1639 if old.bactsig != new.bactsig: 1640 if old.bact == new.bact: 1641 lines.append("the contents of the build action changed\n" + 1642 fmt_with_title('action: ', new.bact)) 1643 1644 # lines.append("the contents of the build action changed [%s] [%s]\n"%(old.bactsig,new.bactsig) + 1645 # fmt_with_title('action: ', new.bact)) 1646 else: 1647 lines.append("the build action changed:\n" + 1648 fmt_with_title('old: ', old.bact) + 1649 fmt_with_title('new: ', new.bact)) 1650 1651 if len(lines) == 0: 1652 return "rebuilding `%s' for unknown reasons\n" % self 1653 1654 preamble = "rebuilding `%s' because" % self 1655 if len(lines) == 1: 1656 return "%s %s" % (preamble, lines[0]) 1657 else: 1658 lines = ["%s:\n" % preamble] + lines 1659 return ( ' '*11).join(lines) 1660
1661 -class NodeList(collections.UserList):
1662 - def __str__(self):
1663 return str(list(map(str, self.data)))
1664
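
# Illustrative sketch (not part of the module source): NodeList stringifies
# each element, so printing a list of Nodes shows their names rather than
# their reprs.
#
#     nl = NodeList(['foo.c', 'bar.c'])      # anything with a usable str()
#     assert str(nl) == "['foo.c', 'bar.c']"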
1665 -def get_children(node, parent): return node.children()
1666 -def ignore_cycle(node, stack): pass
1667 -def do_nothing(node, parent): pass
1668
1669 -class Walker(object):
1670 """An iterator for walking a Node tree. 1671 1672 This is depth-first, children are visited before the parent. 1673 The Walker object can be initialized with any node, and 1674 returns the next node on the descent with each get_next() call. 1675 'kids_func' is an optional function that will be called to 1676 get the children of a node instead of calling 'children'. 1677 'cycle_func' is an optional function that will be called 1678 when a cycle is detected. 1679 1680 This class does not get caught in node cycles caused, for example, 1681 by C header file include loops. 1682 """
1683 - def __init__(self, node, kids_func=get_children, 1684 cycle_func=ignore_cycle, 1685 eval_func=do_nothing):
1686 self.kids_func = kids_func 1687 self.cycle_func = cycle_func 1688 self.eval_func = eval_func 1689 node.wkids = copy.copy(kids_func(node, None)) 1690 self.stack = [node] 1691 self.history = {} # used to efficiently detect and avoid cycles 1692 self.history[node] = None
1693
1694 - def get_next(self):
1695 """Return the next node for this walk of the tree. 1696 1697 This function is intentionally iterative, not recursive, 1698 to sidestep any issues of stack size limitations. 1699 """ 1700 1701 while self.stack: 1702 if self.stack[-1].wkids: 1703 node = self.stack[-1].wkids.pop(0) 1704 if not self.stack[-1].wkids: 1705 self.stack[-1].wkids = None 1706 if node in self.history: 1707 self.cycle_func(node, self.stack) 1708 else: 1709 node.wkids = copy.copy(self.kids_func(node, self.stack[-1])) 1710 self.stack.append(node) 1711 self.history[node] = None 1712 else: 1713 node = self.stack.pop() 1714 del self.history[node] 1715 if node: 1716 if self.stack: 1717 parent = self.stack[-1] 1718 else: 1719 parent = None 1720 self.eval_func(node, parent) 1721 return node 1722 return None
1723
1724 - def is_done(self):
1725 return not self.stack
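
# Illustrative sketch (not part of the module source): Walker yields children
# before their parents, one node per get_next() call, until the tree is
# exhausted.  'root' is a hypothetical Node.
#
#     walker = Walker(root)
#     node = walker.get_next()
#     while node is not None:
#         print(node)                # children are printed before their parent
#         node = walker.get_next()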
1726 1727 1728 arg2nodes_lookups = [] 1729 1730 # Local Variables: 1731 # tab-width:4 1732 # indent-tabs-mode:nil 1733 # End: 1734 # vim: set expandtab tabstop=4 shiftwidth=4: 1735