# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""The Node package for the SCons software construction utility.

This is, in many ways, the heart of SCons.

A Node is where we encapsulate all of the dependency information about
any thing that SCons can build, or about any thing which SCons can use
to build some other thing.  The canonical "thing," of course, is a file,
but a Node can also represent something remote (like a web page) or
something completely abstract (like an Alias).

Each specific type of "thing" is specifically represented by a subclass
of the Node base class:  Node.FS.File for files, Node.Alias for aliases,
etc.  Dependency information is kept here in the base class, and
information specific to files/aliases/etc. is in the subclass.  The
goal, if we've done this correctly, is that any type of "thing" should
be able to depend on any other type of "thing."
"""
from __future__ import annotations

import collections
import copy
from itertools import chain, zip_longest
from typing import Any, Callable, TYPE_CHECKING

import SCons.Debug
# SCons.Errors and SCons.Warnings are referenced below via the package
# namespace, so import them explicitly here.
import SCons.Errors
import SCons.Executor
import SCons.Memoize
import SCons.Warnings
from SCons.compat import NoSlotsPyPy
from SCons.Debug import logInstanceCreation, Trace
from SCons.Executor import Executor
from SCons.Util import hash_signature, is_List, UniqueList, render_tree

if TYPE_CHECKING:
    from SCons.Builder import BuilderBase
    from SCons.Environment import Base as Environment
    from SCons.Scanner import ScannerBase
    from SCons.SConsign import SConsignEntry
print_duplicate = 0

def classname(obj):
    return str(obj.__class__).split('.')[-1]

# Set to false if we're doing a dry run. There's more than one of these
# little treats.
do_store_info = True
# Node states
#
# These are in "priority" order, so that the maximum value for any
# child/dependency of a node represents the state of that node if
# it has no builder of its own.  The canonical example is a file
# system directory, which is only up to date if all of its children
# were up to date.
no_state = 0
pending = 1
executing = 2
up_to_date = 3
executed = 4
failed = 5

StateString = {
    0: "no_state",
    1: "pending",
    2: "executing",
    3: "up_to_date",
    4: "executed",
    5: "failed",
}
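
# Illustrative sketch (not part of the module): the integer states above are
# cheap to store and compare; StateString is only needed when a state has to
# be shown to a human, e.g. in debug or trace output:
#
#     # assuming `node` is any Node instance
#     print("%s is %s" % (node, StateString[node.get_state()]))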
# controls whether implicit dependencies are cached:
implicit_cache = 0

# controls whether implicit dep changes are ignored:
implicit_deps_unchanged = 0

# controls whether the cached implicit deps are ignored:
implicit_deps_changed = 0

# A variable that can be set to an interface-specific function to be called
# to annotate a Node with information about its creation.
def do_nothing_node(node) -> None: pass

Annotate = do_nothing_node

# global set for recording all processed SConstruct/SConscript nodes
SConscriptNodes: set[Node] = set()

# Gets set to 'True' if we're running in interactive mode. Is
# currently used to release parts of a target's info during
# clean builds and update runs (see release_target_info).
interactive = False
def is_derived_none(node):
    raise NotImplementedError

def is_derived_node(node) -> bool:
    """Returns true if this node is derived (i.e. built)."""
    return node.has_builder() or node.side_effect

_is_derived_map = {0: is_derived_none,
                   1: is_derived_node}
def exists_none(node) -> bool:
    raise NotImplementedError

def exists_always(node) -> bool:
    return True

def exists_base(node) -> bool:
    return node.stat() is not None

def exists_entry(node) -> bool:
    """Return if the Entry exists.  Check the file system to see
    what we should turn into first.  Assume a file if there's no
    directory."""
    node.disambiguate()
    return _exists_map[node._func_exists](node)


def exists_file(node) -> bool:
    # Duplicate from source path if we are set up to do this.
    if node.duplicate and not node.is_derived() and not node.linked:
        src = node.srcnode()
        if src is not node:
            # At this point, src is meant to be copied in a variant directory.
            src = src.rfile()
            if src.get_abspath() != node.get_abspath():
                if src.exists():
                    node.do_duplicate(src)
                    # Can't return 1 here because the duplication might
                    # not actually occur if the -n option is being used.
                else:
                    # The source file does not exist.  Make sure no old
                    # copy remains in the variant directory.
                    if print_duplicate:
                        print("dup: no src for %s, unlinking old variant copy" % node)
                    if exists_base(node) or node.islink():
                        node.fs.unlink(node.get_internal_path())
                    # Return None explicitly because the Base.exists() call
                    # above will have cached its value if the file existed.
                    return None
    return exists_base(node)

_exists_map = {0: exists_none,
               1: exists_always,
               2: exists_base,
               3: exists_entry,
               4: exists_file}
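
# Illustrative sketch (not part of the module): rather than storing a bound
# method per instance, each Node stores a small integer (_func_exists) and the
# call site dispatches through the table above, which keeps instances
# compatible with __slots__:
#
#     # roughly what Node.exists() does below
#     result = _exists_map[node._func_exists](node)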
def rexists_none(node):
    raise NotImplementedError

def rexists_node(node):
    return node.exists()

def rexists_base(node):
    return node.rfile().exists()

_rexists_map = {0: rexists_none,
                1: rexists_node,
                2: rexists_base}
def get_contents_none(node):
    raise NotImplementedError

def get_contents_entry(node):
    """Fetch the contents of the entry.  Returns the exact binary
    contents of the file."""
    try:
        node = node.disambiguate(must_exist=True)
    except SCons.Errors.UserError:
        # There was nothing on disk with which to disambiguate
        # this entry.  Leave it as an Entry, but return a null
        # string so calls to get_contents() in emitters and the
        # like (e.g. in qt.py) don't have to disambiguate by hand
        # or catch the exception.
        return ''
    else:
        return _get_contents_map[node._func_get_contents](node)

def get_contents_dir(node):
    """Return content signatures and names of all our children
    separated by new-lines.  Ensure that the nodes are sorted."""
    contents = []
    for n in sorted(node.children(), key=lambda t: t.name):
        contents.append('%s %s\n' % (n.get_csig(), n.name))
    return ''.join(contents)

def get_contents_file(node):
    if not node.rexists():
        return b''
    fname = node.rfile().get_abspath()
    try:
        with open(fname, "rb") as fp:
            contents = fp.read()
    except OSError as e:
        if not e.filename:
            e.filename = fname
        raise
    return contents

_get_contents_map = {0: get_contents_none,
                     1: get_contents_entry,
                     2: get_contents_dir,
                     3: get_contents_file}
def target_from_source_none(node, prefix, suffix, splitext):
    raise NotImplementedError

def target_from_source_base(node, prefix, suffix, splitext):
    return node.dir.Entry(prefix + splitext(node.name)[0] + suffix)

_target_from_source_map = {0: target_from_source_none,
                           1: target_from_source_base}
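
# Illustrative example (hypothetical values, not part of the module):
# target_from_source_base() builds a sibling Entry from a prefix, the source
# name minus its extension, and a suffix.  With prefix='lib', suffix='.a' and
# a splitext like os.path.splitext, a source node named 'foo.c' yields an
# Entry named 'libfoo.a' in the same directory:
#
#     # target_from_source_base(src_node, 'lib', '.a', os.path.splitext)
#     # -> src_node.dir.Entry('libfoo.a')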
#
# The new decider subsystem for Nodes
#
# We would set and overwrite the changed_since_last_build function
# before, but for being able to use slots (less memory!) we now have
# a dictionary of the different decider functions. Then in the Node
# subclasses we simply store the index to the decider that should be
# used by it.
#


#
# First, the single decider functions
#

def changed_since_last_build_node(node, target, prev_ni, repo_node=None) -> bool:
    """
    Must be overridden in a specific subclass to return True if this
    Node (a dependency) has changed since the last time it was used
    to build the specified target.  prev_ni is this Node's state (for
    example, its file timestamp, length, maybe content signature)
    as of the last time the target was built.

    Note that this method is called through the dependency, not the
    target, because a dependency Node must be able to use its own
    logic to decide if it changed.  For example, File Nodes need to
    obey if we're configured to use timestamps, but Python Value Nodes
    never use timestamps and always use the content.  If this method
    were called through the target, then each Node's implementation
    of this method would have to have more complicated logic to
    handle all the different Node types on which it might depend.
    """
    raise NotImplementedError


def changed_since_last_build_alias(node, target, prev_ni, repo_node=None) -> bool:
    cur_csig = node.get_csig()
    try:
        return cur_csig != prev_ni.csig
    except AttributeError:
        return True


def changed_since_last_build_entry(node, target, prev_ni, repo_node=None) -> bool:
    node.disambiguate()
    return _decider_map[node.changed_since_last_build](node, target, prev_ni, repo_node)


def changed_since_last_build_state_changed(node, target, prev_ni, repo_node=None) -> bool:
    return node.state != SCons.Node.up_to_date


def decide_source(node, target, prev_ni, repo_node=None) -> bool:
    return target.get_build_env().decide_source(node, target, prev_ni, repo_node)


def decide_target(node, target, prev_ni, repo_node=None) -> bool:
    return target.get_build_env().decide_target(node, target, prev_ni, repo_node)


def changed_since_last_build_python(node, target, prev_ni, repo_node=None) -> bool:
    cur_csig = node.get_csig()
    try:
        return cur_csig != prev_ni.csig
    except AttributeError:
        return True


#
# Now, the mapping from indices to decider functions
#

_decider_map = {0: changed_since_last_build_node,
                1: changed_since_last_build_alias,
                2: changed_since_last_build_entry,
                3: changed_since_last_build_state_changed,
                4: decide_source,
                5: decide_target,
                6: changed_since_last_build_python}
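
# Illustrative sketch (not part of the module): a Node only stores the integer
# index `changed_since_last_build`; callers dispatch through the table above,
# and Node.Decider() (defined below) appends user-supplied callables to it:
#
#     decider = _decider_map[child.changed_since_last_build]
#     if decider(child, target, prev_ni):
#         ...  # the dependency changed since the last build of `target`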
do_store_info = True


#
# The new store_info subsystem for Nodes
#
# We would set and overwrite the store_info function
# before, but for being able to use slots (less memory!) we now have
# a dictionary of the different functions. Then in the Node
# subclasses we simply store the index to the info method that should be
# used by it.
#


#
# First, the single info functions
#

def store_info_pass(node) -> None:
    pass

def store_info_file(node) -> None:
    # Merge our build information into the already-stored entry.
    # This accommodates "chained builds" where a file that's a target
    # in one build (SConstruct file) is a source in a different build.
    # See test/chained-build.py for the use case.
    if do_store_info:
        node.dir.sconsign().store_info(node.name, node)


store_info_map = {0: store_info_pass,
                  1: store_info_file}
# Classes for signature info for Nodes.

class NodeInfoBase:
    """
    The generic base class for signature information for a Node.

    Node subclasses should subclass NodeInfoBase to provide their own
    logic for dealing with their own Node-specific signature information.
    """
    __slots__ = ('__weakref__',)
    current_version_id = 2

    def update(self, node: Node) -> None:
        try:
            field_list = self.field_list
        except AttributeError:
            return
        for f in field_list:
            try:
                delattr(self, f)
            except AttributeError:
                pass
            try:
                func = getattr(node, 'get_' + f)
            except AttributeError:
                pass
            else:
                setattr(self, f, func())

    def convert(self, node, val) -> None:
        pass

    def merge(self, other: NodeInfoBase) -> None:
        """
        Merge the fields of another object into this object. Already existing
        information is overwritten by the other instance's data.
        WARNING: If a '__dict__' slot is added, it should be updated instead of
        replaced.
        """
        state = other.__getstate__()
        self.__setstate__(state)

    def format(self, field_list: list[str] | None = None, names: bool = False):
        if field_list is None:
            try:
                field_list = self.field_list
            except AttributeError:
                field_list = list(getattr(self, '__dict__', {}).keys())
                for obj in type(self).mro():
                    for slot in getattr(obj, '__slots__', ()):
                        if slot not in ('__weakref__', '__dict__'):
                            field_list.append(slot)
                field_list.sort()
        fields = []
        for field in field_list:
            try:
                f = getattr(self, field)
            except AttributeError:
                f = None
            f = str(f)
            if names:
                f = field + ': ' + f
            fields.append(f)
        return fields

    def __getstate__(self) -> dict[str, Any]:
        """
        Return all fields that shall be pickled. Walk the slots in the class
        hierarchy and add those to the state dictionary. If a '__dict__' slot is
        available, copy all entries to the dictionary. Also include the version
        id, which is fixed for all instances of a class.
        """
        state = getattr(self, '__dict__', {}).copy()
        for obj in type(self).mro():
            for name in getattr(obj, '__slots__', ()):
                if hasattr(self, name):
                    state[name] = getattr(self, name)

        state['_version_id'] = self.current_version_id
        try:
            del state['__weakref__']
        except KeyError:
            pass
        return state

    def __setstate__(self, state: dict[str, Any]) -> None:
        """
        Restore the attributes from a pickled state. The version is discarded.
        """
        # TODO check or discard version
        del state['_version_id']

        for key, value in state.items():
            if key not in ('__weakref__',):
                setattr(self, key, value)
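
# Illustrative sketch (not part of the module): merge() above is just a
# pickle-style state copy, so for two NodeInfo objects `a` and `b`:
#
#     a.merge(b)        # roughly equivalent to:
#     a.__setstate__(b.__getstate__())
#
# i.e. every slot or attribute present on `b` overwrites the same field on
# `a`, while fields only present on `a` are left untouched.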
class BuildInfoBase:
    """
    The generic base class for build information for a Node.

    This is what gets stored in a .sconsign file for each target file.
    It contains a NodeInfo instance for this node (signature information
    that's specific to the type of Node) and direct attributes for the
    generic build stuff we have to track:  sources, explicit dependencies,
    implicit dependencies, and action information.
    """
    __slots__ = ("bsourcesigs", "bdependsigs", "bimplicitsigs", "bactsig",
                 "bsources", "bdepends", "bact", "bimplicit", "__weakref__")
    current_version_id = 2

    def __init__(self) -> None:
        # Create an object attribute from the class attribute so it ends up
        # in the pickled data in the .sconsign file.
        self.bsourcesigs: list[BuildInfoBase] = []
        self.bdependsigs: list[BuildInfoBase] = []
        self.bimplicitsigs: list[BuildInfoBase] = []
        self.bactsig: str | None = None

    def merge(self, other: BuildInfoBase) -> None:
        """
        Merge the fields of another object into this object. Already existing
        information is overwritten by the other instance's data.
        WARNING: If a '__dict__' slot is added, it should be updated instead of
        replaced.
        """
        state = other.__getstate__()
        self.__setstate__(state)

    def __getstate__(self) -> dict[str, Any]:
        """
        Return all fields that shall be pickled. Walk the slots in the class
        hierarchy and add those to the state dictionary. If a '__dict__' slot is
        available, copy all entries to the dictionary. Also include the version
        id, which is fixed for all instances of a class.
        """
        state = getattr(self, '__dict__', {}).copy()
        for obj in type(self).mro():
            for name in getattr(obj, '__slots__', ()):
                if hasattr(self, name):
                    state[name] = getattr(self, name)

        state['_version_id'] = self.current_version_id
        try:
            del state['__weakref__']
        except KeyError:
            pass
        return state

    def __setstate__(self, state: dict[str, Any]) -> None:
        """
        Restore the attributes from a pickled state.
        """
        # TODO check or discard version
        del state['_version_id']
        for key, value in state.items():
            if key not in ('__weakref__',):
                setattr(self, key, value)
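
# Illustrative summary (assumed field meanings, based on the docstring above
# and on get_binfo() below): one BuildInfoBase per target is what ends up in
# .sconsign, roughly:
#
#     binfo.bsources  / binfo.bsourcesigs    # source nodes and their NodeInfo sigs
#     binfo.bdepends  / binfo.bdependsigs    # explicit Depends() nodes and sigs
#     binfo.bimplicit / binfo.bimplicitsigs  # scanned (implicit) nodes and sigs
#     binfo.bact      / binfo.bactsig        # action string and its signature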
class Node(metaclass=NoSlotsPyPy):
    """The base Node class, for entities that we know how to
    build, or use to build other Nodes.
    """

    __slots__ = ['sources',
                 'sources_set',
                 'target_peers',
                 '_specific_sources',
                 'depends',
                 'depends_set',
                 'ignore',
                 'ignore_set',
                 'prerequisites',
                 'implicit',
                 'waiting_parents',
                 'waiting_s_e',
                 'ref_count',
                 'wkids',
                 'env',
                 'state',
                 'precious',
                 'noclean',
                 'nocache',
                 'cached',
                 'always_build',
                 'includes',
                 'attributes',
                 'side_effect',
                 'side_effects',
                 'linked',
                 '_memo',
                 'executor',
                 'binfo',
                 'ninfo',
                 'builder',
                 'is_explicit',
                 'implicit_set',
                 'changed_since_last_build',
                 'store_info',
                 'pseudo',
                 '_tags',
                 '_func_is_derived',
                 '_func_exists',
                 '_func_rexists',
                 '_func_get_contents',
                 '_func_target_from_source']

    class Attrs:
        __slots__ = ('shared', '__dict__')

    def __init__(self) -> None:
        if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.Node')
        # Note that we no longer explicitly initialize a self.builder
        # attribute to None here.  That's because the self.builder
        # attribute may be created on-the-fly later by a subclass (the
        # canonical example being a builder to fetch a file from a
        # source code system like CVS or Subversion).

        # Each list of children that we maintain is accompanied by a
        # dictionary used to look up quickly whether a node is already
        # present in the list.  Empirical tests showed that it was
        # fastest to maintain them as side-by-side Node attributes in
        # this way, instead of wrapping up each list+dictionary pair in
        # a class.  (Of course, we could always still do that in the
        # future if we had a good reason to...).
        self.sources: list[Node] = []  # source files used to build node
        self.sources_set: set[Node] = set()
        self._specific_sources = False
        self.depends: list[Node] = []  # explicit dependencies (from Depends)
        self.depends_set: set[Node] = set()
        self.ignore: list[Node] = []  # dependencies to ignore
        self.ignore_set: set[Node] = set()
        self.prerequisites: UniqueList | None = None
        self.implicit: list[Node] | None = None  # implicit (scanned) dependencies (None means not scanned yet)
        self.waiting_parents: set[Node] = set()
        self.waiting_s_e: set[Node] = set()
        self.ref_count = 0
        self.wkids: list[Node] | None = None  # Kids yet to walk, when it's an array

        self.env: Environment | None = None
        self.state = no_state
        self.precious = False
        self.pseudo = False
        self.noclean = False
        self.nocache = False
        self.cached = False  # is this node pulled from cache?
        self.always_build = False
        self.includes: list[str] | None = None
        self.attributes = self.Attrs()  # Generic place to stick information about the Node.
        self.side_effect = False  # true iff this node is a side effect
        self.side_effects: list[Node] = []  # the side effects of building this target
        self.linked = False  # is this node linked to the variant directory?
        self.changed_since_last_build = 0  # Index for "_decider_map".
        self.store_info = 0  # Index for "store_info_map".
        self._tags: dict[str, Any] | None = None
        self._func_is_derived = 1  # Index for "_is_derived_map".
        self._func_exists = 1  # Index for "_exists_map"
        self._func_rexists = 1  # Index for "_rexists_map"
        self._func_get_contents = 0  # Index for "_get_contents_map"
        self._func_target_from_source = 0  # Index for "_target_from_source_map"
        self.ninfo: NodeInfoBase | None = None

        self.clear_memoized_values()

        # Let the interface in which the build engine is embedded
        # annotate this Node with its own info (like a description of
        # what line in what file created the node, for example).
        Annotate(self)
    def disambiguate(self, must_exist: bool = False):
        return self

    def get_suffix(self) -> str:
        return ''

    @SCons.Memoize.CountMethodCall
    def get_build_env(self) -> Environment:
        """Fetch the appropriate Environment to build this node."""
        try:
            return self._memo['get_build_env']
        except KeyError:
            pass
        result = self.get_executor().get_build_env()
        self._memo['get_build_env'] = result
        return result

    def get_build_scanner_path(self, scanner: ScannerBase):
        """Fetch the appropriate scanner path for this node."""
        return self.get_executor().get_build_scanner_path(scanner)

    def set_executor(self, executor: Executor) -> None:
        """Set the action executor for this node."""
        self.executor = executor

    def get_executor(self, create: bool = True) -> Executor:
        """Fetch the action executor for this node.  Create one if
        there isn't already one, and requested to do so."""
        try:
            executor = self.executor
        except AttributeError:
            if not create:
                raise
            try:
                act = self.builder.action
            except AttributeError:
                executor = SCons.Executor.Null(targets=[self])  # type: ignore[assignment]
            else:
                executor = SCons.Executor.Executor(act,
                                                   self.env or self.builder.env,
                                                   [self.builder.overrides],
                                                   [self],
                                                   self.sources)
            self.executor = executor
        return executor

    def executor_cleanup(self) -> None:
        """Let the executor clean up any cached information."""
        try:
            executor = self.get_executor(create=False)
        except AttributeError:
            pass
        else:
            if executor is not None:
                executor.cleanup()

    def reset_executor(self) -> None:
        """Remove cached executor; forces recompute when needed."""
        try:
            delattr(self, 'executor')
        except AttributeError:
            pass

    def push_to_cache(self) -> bool:
        """Try to push a node into a cache."""
        return False

    def retrieve_from_cache(self) -> bool:
        """Try to retrieve the node's content from a cache.

        This method is called from multiple threads in a parallel build,
        so only do thread safe stuff here. Do thread unsafe stuff
        in :meth:`built`.

        Returns true if the node was successfully retrieved.
        """
        return False
    #
    # Taskmaster interface subsystem
    #

    def make_ready(self) -> None:
        """Get a Node ready for evaluation.

        This is called before the Taskmaster decides if the Node is
        up-to-date or not.  Overriding this method allows for a Node
        subclass to be disambiguated if necessary, or for an implicit
        source builder to be attached.
        """
        pass

    def prepare(self) -> None:
        """Prepare for this Node to be built.

        This is called after the Taskmaster has decided that the Node
        is out-of-date and must be rebuilt, but before actually calling
        the method to build the Node.

        This default implementation checks that explicit or implicit
        dependencies either exist or are derived, and initializes the
        BuildInfo structure that will hold the information about how
        this node is, uh, built.

        (The existence of source files is checked separately by the
        Executor, which aggregates checks for all of the targets built
        by a specific action.)

        Overriding this method allows for a Node subclass to remove
        the underlying file from the file system.  Note that subclass
        methods should call this base class method to get the child
        check and the BuildInfo structure.
        """
        if self.depends is not None:
            for d in self.depends:
                if d.missing():
                    msg = "Explicit dependency `%s' not found, needed by target `%s'."
                    raise SCons.Errors.StopError(msg % (d, self))
        if self.implicit is not None:
            for i in self.implicit:
                if i.missing():
                    msg = "Implicit dependency `%s' not found, needed by target `%s'."
                    raise SCons.Errors.StopError(msg % (i, self))
        self.binfo = self.get_binfo()

    def build(self, **kw) -> None:
        """Actually build the node.

        This is called by the Taskmaster after it's decided that the
        Node is out-of-date and must be rebuilt, and after the
        :meth:`prepare` method has gotten everything, uh, prepared.

        This method is called from multiple threads in a parallel build,
        so only do thread safe stuff here. Do thread unsafe stuff
        in :meth:`built`.
        """
        try:
            self.get_executor()(self, **kw)
        except SCons.Errors.BuildError as e:
            e.node = self
            raise

    def built(self) -> None:
        """Called just after this node is successfully built."""

        # Clear the implicit dependency caches of any Nodes
        # waiting for this Node to be built.
        for parent in self.waiting_parents:
            parent.implicit = None

            # Handle the issue where a builder emits more than one target and
            # the source file for the builder is itself generated.  In that
            # case only the first target got its .implicit cleared when the
            # source file was built (second scan), leaving only the partial
            # implicits from the scan made before the source file existed
            # (typically just the compiler).  Scanned files were then appended,
            # this was persisted to sconsign, and a rebuild caused false
            # rebuilds because the ordering of the implicit list then changed
            # to what it should have been in the first place.
            # This is at least the following bugs:
            # https://github.com/SCons/scons/issues/2811
            # https://jira.mongodb.org/browse/SERVER-33111
            try:
                for peer in parent.target_peers:
                    peer.implicit = None
            except AttributeError:
                pass

        self.clear()

        if self.pseudo:
            if self.exists():
                raise SCons.Errors.UserError("Pseudo target " + str(self) + " must not exist")
        else:
            if not self.exists() and do_store_info:
                SCons.Warnings.warn(SCons.Warnings.TargetNotBuiltWarning,
                                    "Cannot find target " + str(self) + " after building")
        self.ninfo.update(self)

    def visited(self) -> None:
        """Called just after this node has been visited (with or
        without a build)."""
        try:
            binfo = self.binfo
        except AttributeError:
            # Apparently this node doesn't need build info, so
            # don't bother calculating or storing it.
            pass
        else:
            self.ninfo.update(self)
            SCons.Node.store_info_map[self.store_info](self)

    def release_target_info(self) -> None:
        """Called just after this node has been marked
        up-to-date or was built completely.

        This is where we try to release as many target node infos
        as possible for clean builds and update runs, in order
        to minimize the overall memory consumption.

        By purging attributes that aren't needed any longer after
        a Node (=File) got built, we don't have to care that much how
        many KBytes a Node actually requires...as long as we free
        the memory shortly afterwards.

        @see: built() and File.release_target_info()
        """
        pass
    def add_to_waiting_s_e(self, node: Node) -> None:
        self.waiting_s_e.add(node)

    def add_to_waiting_parents(self, node: Node) -> int:
        """
        Returns the number of nodes added to our waiting parents list:
        1 if we add a unique waiting parent, 0 if not.  (Note that the
        returned values are intended to be used to increment a reference
        count, so don't think you can "clean up" this function by using
        True and False instead...)
        """
        wp = self.waiting_parents
        if node in wp:
            return 0
        wp.add(node)
        return 1

    def postprocess(self) -> None:
        """Clean up anything we don't need to hang onto after we've
        been built."""
        self.executor_cleanup()
        self.waiting_parents = set()

    def clear(self) -> None:
        """Completely clear a Node of all its cached state (so that it
        can be re-evaluated by interfaces that do continuous integration
        builds).
        """
        # The del_binfo() call here isn't necessary for normal execution,
        # but is for interactive mode, where we might rebuild the same
        # target and need to start from scratch.
        self.del_binfo()
        self.clear_memoized_values()
        self.ninfo = self.new_ninfo()
        self.executor_cleanup()
        for attr in ['cachedir_csig', 'cachesig', 'contentsig']:
            try:
                delattr(self, attr)
            except AttributeError:
                pass
        self.cached = False
        self.includes = None

    def clear_memoized_values(self) -> None:
        self._memo = {}

    def builder_set(self, builder: BuilderBase | None) -> None:
        self.builder = builder
        try:
            del self.executor
        except AttributeError:
            pass

    def has_builder(self) -> bool:
        """Return whether this Node has a builder or not.

        In Boolean tests, this turns out to be a *lot* more efficient
        than simply examining the builder attribute directly ("if
        node.builder: ...").  When the builder attribute is examined
        directly, it ends up calling __getattr__ for both the __len__
        and __bool__ attributes on instances of our Builder Proxy
        class(es), generating a bazillion extra calls and slowing
        things down immensely.
        """
        try:
            b = self.builder
        except AttributeError:
            # There was no explicit builder for this Node, so initialize
            # the self.builder attribute to None now.
            b = self.builder = None
        return b is not None
    def set_explicit(self, is_explicit: bool) -> None:
        self.is_explicit = is_explicit

    def has_explicit_builder(self) -> bool:
        """Return whether this Node has an explicit builder.

        This allows an internal Builder created by SCons to be marked
        non-explicit, so that it can be overridden by an explicit
        builder that the user supplies (the canonical example being
        directories)."""
        try:
            return self.is_explicit
        except AttributeError:
            self.is_explicit = False
            return False

    def get_builder(self, default_builder: BuilderBase | None = None) -> BuilderBase | None:
        """Return the set builder, or a specified default value."""
        try:
            return self.builder
        except AttributeError:
            return default_builder

    multiple_side_effect_has_builder = has_builder

    def is_derived(self) -> bool:
        """
        Returns true if this node is derived (i.e. built).

        This should return true only for nodes whose path should be in
        the variant directory when duplicate=0 and should contribute their build
        signatures when they are used as source files to other derived files. For
        example: sources with source builders are not derived in this sense,
        and hence should not return true.
        """
        return _is_derived_map[self._func_is_derived](self)

    def is_sconscript(self) -> bool:
        """Returns true if this node is an sconscript."""
        return self in SConscriptNodes

    def is_conftest(self) -> bool:
        """Returns true if this node is a conftest node."""
        try:
            self.attributes.conftest_node
        except AttributeError:
            return False
        return True

    def check_attributes(self, name: str) -> Any | None:
        """Simple API to check if node.attributes for name has been set."""
        return getattr(getattr(self, "attributes", None), name, None)

    def alter_targets(self):
        """Return a list of alternate targets for this Node."""
        return [], None

    def get_found_includes(self, env: Environment, scanner: ScannerBase | None, path) -> list[Node]:
        """Return the scanned include lines (implicit dependencies)
        found in this node.

        The default is no implicit dependencies.  We expect this method
        to be overridden by any subclass that can be scanned for
        implicit dependencies.
        """
        return []

    def get_implicit_deps(self, env: Environment, initial_scanner: ScannerBase | None, path_func, kw={}) -> list[Node]:
        """Return a list of implicit dependencies for this node.

        This method exists to handle recursive invocation of the scanner
        on the implicit dependencies returned by the scanner, if the
        scanner's recursive flag says that we should.
        """
        nodes = [self]
        seen = set(nodes)
        dependencies = []
        path_memo = {}

        root_node_scanner = self._get_scanner(env, initial_scanner, None, kw)

        while nodes:
            node = nodes.pop(0)

            scanner = node._get_scanner(env, initial_scanner, root_node_scanner, kw)
            if not scanner:
                continue

            try:
                path = path_memo[scanner]
            except KeyError:
                path = path_func(scanner)
                path_memo[scanner] = path

            included_deps = [x for x in node.get_found_includes(env, scanner, path) if x not in seen]
            if included_deps:
                dependencies.extend(included_deps)
                seen.update(included_deps)
                nodes.extend(scanner.recurse_nodes(included_deps))

        return dependencies
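
    # Illustrative walk-through (hypothetical file names, not part of the
    # module) of the loop above for a C-like scanner whose recurse_nodes()
    # returns the includes it finds:
    #
    #     main.c  ->  scanner finds foo.h   (added to dependencies, queued)
    #     foo.h   ->  scanner finds bar.h   (added to dependencies, queued)
    #     bar.h   ->  scanner finds foo.h   (already in `seen`, skipped)
    #
    # The worklist `nodes` drives a breadth-first expansion while `seen`
    # guarantees termination on include cycles.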
    def _get_scanner(self, env: Environment, initial_scanner: ScannerBase | None, root_node_scanner: ScannerBase | None, kw: dict[str, Any] | None) -> ScannerBase | None:
        if initial_scanner:
            # handle explicit scanner case
            scanner = initial_scanner.select(self)
        else:
            # handle implicit scanner case
            scanner = self.get_env_scanner(env, kw)
            if scanner:
                scanner = scanner.select(self)

        if not scanner:
            # no scanner could be found for the given node's scanner key;
            # thus, make an attempt at using a default.
            scanner = root_node_scanner

        return scanner

    def get_env_scanner(self, env: Environment, kw: dict[str, Any] | None = {}) -> ScannerBase | None:
        return env.get_scanner(self.scanner_key())

    def get_target_scanner(self) -> ScannerBase | None:
        return self.builder.target_scanner

    def get_source_scanner(self, node: Node) -> ScannerBase | None:
        """Fetch the source scanner for the specified node.

        NOTE:  "self" is the target being built, "node" is
        the source file for which we want to fetch the scanner.

        Implies self.has_builder() is true; again, expect to only be
        called from locations where this is already verified.

        This function may be called very often; it attempts to cache
        the scanner found to improve performance.
        """
        scanner = None
        try:
            scanner = self.builder.source_scanner
        except AttributeError:
            pass
        if not scanner:
            # The builder didn't have an explicit scanner, so go look up
            # a scanner from env['SCANNERS'] based on the node's scanner
            # key (usually the file extension).
            scanner = self.get_env_scanner(self.get_build_env())
        if scanner:
            scanner = scanner.select(node)
        return scanner

    def add_to_implicit(self, deps: list[Node]) -> None:
        if not hasattr(self, 'implicit') or self.implicit is None:
            self.implicit = []
            self.implicit_set: set[Node] = set()
            self._children_reset()
        self._add_child(self.implicit, self.implicit_set, deps)

    def scan(self) -> None:
        """Scan this node's dependents for implicit dependencies."""
        # Don't bother scanning non-derived files, because we don't
        # care what their dependencies are.
        # Don't scan again, if we already have scanned.
        T = False
        if self.implicit is not None:
            return
        self.implicit = []
        self.implicit_set = set()
        self._children_reset()
        if not self.has_builder():
            return

        build_env = self.get_build_env()
        executor = self.get_executor()

        # Here's where we implement --implicit-cache.
        if implicit_cache and not implicit_deps_changed:
            implicit = self.get_stored_implicit()
            if implicit is not None:
                # We now expect the implicit dependencies returned from the
                # stored .sconsign entry to have already been converted
                # to Nodes for us.  (We used to run them through a
                # source_factory function here.)

                # Update all of the targets with them.  This
                # essentially short-circuits an N*M scan of the
                # sources for each individual target, which is a hell
                # of a lot more efficient.
                def print_nodelist(n):
                    tgts = [f"{t.path!r}" for t in n]
                    return f"[{', '.join(tgts)}]"

                for tgt in executor.get_all_targets():
                    if T: Trace(f"adding implicit {print_nodelist(implicit)} to {tgt!s}\n")
                    tgt.add_to_implicit(implicit)

                if implicit_deps_unchanged or self.is_up_to_date():
                    return
                # one of this node's sources has changed,
                # so we must recalculate the implicit deps for all targets
                for tgt in executor.get_all_targets():
                    tgt.implicit = []
                    tgt.implicit_set = set()

        # Have the executor scan the sources.
        executor.scan_sources(self.builder.source_scanner)

        # If there's a target scanner, have the executor scan the target
        # node itself and associated targets that might be built.
        scanner = self.get_target_scanner()
        if scanner:
            executor.scan_targets(scanner)

    def scanner_key(self) -> str | None:
        return None

    def select_scanner(self, scanner: ScannerBase) -> ScannerBase | None:
        """Selects a scanner for this Node.

        This is a separate method so it can be overridden by Node
        subclasses (specifically, Node.FS.Dir) that *must* use their
        own Scanner and don't select one from the Scanner.Selector that's
        configured for the target.
        """
        return scanner.select(self)

    def env_set(self, env: Environment, safe: bool = False) -> None:
        if safe and self.env:
            return
        self.env = env
    #
    # SIGNATURE SUBSYSTEM
    #

    NodeInfo = NodeInfoBase
    BuildInfo = BuildInfoBase

    def new_ninfo(self) -> NodeInfoBase:
        ninfo = self.NodeInfo()
        return ninfo

    def get_ninfo(self) -> NodeInfoBase:
        if self.ninfo is not None:
            return self.ninfo
        self.ninfo = self.new_ninfo()
        return self.ninfo

    def new_binfo(self) -> BuildInfoBase:
        binfo = self.BuildInfo()
        return binfo

    def get_binfo(self) -> BuildInfoBase:
        """
        Fetch a node's build information.

        node - the node whose sources will be collected
        cache - alternate node to use for the signature cache
        returns - the build signature

        This no longer handles the recursive descent of the
        node's children's signatures.  We expect that they're
        already built and updated by someone else, if that's
        what's wanted.
        """
        try:
            return self.binfo
        except AttributeError:
            pass

        binfo = self.new_binfo()
        self.binfo = binfo

        executor = self.get_executor()
        ignore_set = self.ignore_set

        if self.has_builder():
            binfo.bact = str(executor)
            binfo.bactsig = hash_signature(executor.get_contents())

        if self._specific_sources:
            sources = [s for s in self.sources if s not in ignore_set]
        else:
            sources = executor.get_unignored_sources(self, self.ignore)

        seen = set()
        binfo.bsources = [s for s in sources if s not in seen and not seen.add(s)]
        binfo.bsourcesigs = [s.get_ninfo() for s in binfo.bsources]

        binfo.bdepends = [d for d in self.depends if d not in ignore_set]
        binfo.bdependsigs = [d.get_ninfo() for d in self.depends]

        # Because self.implicit is initialized to None (and not empty list [])
        # we have to handle this case
        if not self.implicit:
            binfo.bimplicit = []
            binfo.bimplicitsigs = []
        else:
            binfo.bimplicit = [i for i in self.implicit if i not in ignore_set]
            binfo.bimplicitsigs = [i.get_ninfo() for i in binfo.bimplicit]

        return binfo

    def del_binfo(self) -> None:
        """Delete the build info from this node."""
        try:
            delattr(self, 'binfo')
        except AttributeError:
            pass

    def get_csig(self) -> str:
        try:
            return self.ninfo.csig
        except AttributeError:
            ninfo = self.get_ninfo()
            ninfo.csig = hash_signature(self.get_contents())
            return self.ninfo.csig

    def get_cachedir_csig(self) -> str:
        return self.get_csig()

    def get_stored_info(self) -> SConsignEntry | None:
        return None

    def get_stored_implicit(self) -> list[Node] | None:
        """Fetch the stored implicit dependencies."""
        return None
    def set_precious(self, precious: bool = True) -> None:
        """Set the Node's precious value."""
        self.precious = precious

    def set_pseudo(self, pseudo: bool = True) -> None:
        """Set the Node's pseudo value."""
        self.pseudo = pseudo

    def set_noclean(self, noclean: bool = True) -> None:
        """Set the Node's noclean value."""
        self.noclean = noclean

    def set_nocache(self, nocache: bool = True) -> None:
        """Set the Node's nocache value."""
        self.nocache = nocache

    def set_always_build(self, always_build: bool = True) -> None:
        """Set the Node's always_build value."""
        self.always_build = always_build

    def exists(self) -> bool:
        """Reports whether node exists."""
        return _exists_map[self._func_exists](self)

    def rexists(self) -> bool:
        """Does this node exist locally or in a repository?"""
        # There are no repositories by default:
        return _rexists_map[self._func_rexists](self)

    def get_contents(self) -> bytes | str:
        """Fetch the contents of the entry."""
        return _get_contents_map[self._func_get_contents](self)

    def missing(self) -> bool:
        return not self.is_derived() and \
               not self.linked and \
               not self.rexists()

    def remove(self) -> None:
        """Remove this Node:  no-op by default."""
        return None

    def add_dependency(self, depend: list[Node]) -> None:
        """Adds dependencies."""
        try:
            self._add_child(self.depends, self.depends_set, depend)
        except TypeError as e:
            e = e.args[0]
            if is_List(e):
                s = list(map(str, e))
            else:
                s = str(e)
            raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))

    def add_prerequisite(self, prerequisite: list[Node]) -> None:
        """Adds prerequisites."""
        if self.prerequisites is None:
            self.prerequisites = UniqueList()
        self.prerequisites.extend(prerequisite)
        self._children_reset()

    def add_ignore(self, depend: list[Node]) -> None:
        """Adds dependencies to ignore."""
        try:
            self._add_child(self.ignore, self.ignore_set, depend)
        except TypeError as e:
            e = e.args[0]
            if is_List(e):
                s = list(map(str, e))
            else:
                s = str(e)
            raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))

    def add_source(self, source: list[Node]) -> None:
        """Adds sources."""
        if self._specific_sources:
            return
        try:
            self._add_child(self.sources, self.sources_set, source)
        except TypeError as e:
            e = e.args[0]
            if is_List(e):
                s = list(map(str, e))
            else:
                s = str(e)
            raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))

    def _add_child(self, collection: list[Node], set: set[Node], child: list[Node]) -> None:
        """Adds 'child' to 'collection', first checking 'set' to see if it's
        already present."""
        added = None
        for c in child:
            if c not in set:
                set.add(c)
                collection.append(c)
                added = 1
        if added:
            self._children_reset()

    def set_specific_source(self, source: list[Node]) -> None:
        self.add_source(source)
        self._specific_sources = True

    def add_wkid(self, wkid: Node) -> None:
        """Add a node to the list of kids waiting to be evaluated."""
        if self.wkids is not None:
            self.wkids.append(wkid)

    def _children_reset(self) -> None:
        self.clear_memoized_values()
        # We need to let the Executor clear out any calculated
        # build info that it's cached so we can re-calculate it.
        self.executor_cleanup()
    @SCons.Memoize.CountMethodCall
    def _children_get(self) -> list[Node]:
        try:
            return self._memo['_children_get']
        except KeyError:
            pass

        # The return list may contain duplicate Nodes, especially in
        # source trees where there are a lot of repeated #includes
        # of a tangle of .h files.  Profiling shows, however, that
        # eliminating the duplicates with a brute-force approach that
        # preserves the order (that is, something like:
        #
        #       u = []
        #       for n in list:
        #           if n not in u:
        #               u.append(n)
        #
        # ) takes more cycles than just letting the underlying methods
        # hand back cached values if a Node's information is requested
        # multiple times.  (Other methods of removing duplicates, like
        # using dictionary keys, lose the order, and the only ordered
        # dictionary patterns I found all ended up using "not in"
        # internally anyway...)
        if self.ignore_set:
            iter = chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f])

            children = []
            for i in iter:
                if i not in self.ignore_set:
                    children.append(i)
        else:
            children = self.all_children(scan=False)

        self._memo['_children_get'] = children
        return children

    def all_children(self, scan: bool = True) -> list[Node]:
        """Return a list of all the node's direct children."""
        if scan:
            self.scan()

        # The return list may contain duplicate Nodes, especially in
        # source trees where there are a lot of repeated #includes
        # of a tangle of .h files.  Profiling shows, however, that
        # eliminating the duplicates with a brute-force approach that
        # preserves the order (that is, something like:
        #
        #       u = []
        #       for n in list:
        #           if n not in u:
        #               u.append(n)
        #
        # ) takes more cycles than just letting the underlying methods
        # hand back cached values if a Node's information is requested
        # multiple times.  (Other methods of removing duplicates, like
        # using dictionary keys, lose the order, and the only ordered
        # dictionary patterns I found all ended up using "not in"
        # internally anyway...)
        return list(chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f]))

    def children(self, scan: bool = True) -> list[Node]:
        """Return a list of the node's direct children, minus those
        that are ignored by this node."""
        if scan:
            self.scan()
        return self._children_get()

    def set_state(self, state: int) -> None:
        self.state = state

    def get_state(self) -> int:
        return self.state

    def get_env(self) -> Environment:
        env = self.env
        if not env:
            import SCons.Defaults
            env = SCons.Defaults.DefaultEnvironment()
        return env
    def Decider(self, function: Callable[[Node, Node, NodeInfoBase, Node | None], bool]) -> None:
        foundkey = None
        for k, v in _decider_map.items():
            if v == function:
                foundkey = k
                break
        if not foundkey:
            foundkey = len(_decider_map)
            _decider_map[foundkey] = function
        self.changed_since_last_build = foundkey
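
    # Illustrative sketch (not part of the module): a per-node decider supplied
    # from a SConscript ends up here, where it is registered in _decider_map
    # and the node only remembers the new index:
    #
    #     def content_and_size(node, target, prev_ni, repo_node=None):
    #         # hypothetical custom decider: rebuild if either the content
    #         # signature or the recorded size differs from last time
    #         return (node.get_csig() != getattr(prev_ni, 'csig', None)
    #                 or getattr(prev_ni, 'size', None) is None)
    #
    #     some_node.Decider(content_and_size)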
    def Tag(self, key: str, value: Any | None) -> None:
        """Add a user-defined tag."""
        if not self._tags:
            self._tags = {}
        self._tags[key] = value

    def GetTag(self, key: str) -> Any | None:
        """Return a user-defined tag."""
        if not self._tags:
            return None
        return self._tags.get(key, None)
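
    # Illustrative usage (hypothetical key/value, not part of the module):
    # tags are an arbitrary key/value stash attached lazily to a Node:
    #
    #     node.Tag('my-tool-marker', 0o755)
    #     node.GetTag('my-tool-marker')   # -> 0o755
    #     node.GetTag('missing-key')      # -> None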
    def changed(self, node: Node | None = None, allowcache: bool = False) -> bool:
        """
        Returns if the node is up-to-date with respect to the BuildInfo
        stored last time it was built.  The default behavior is to compare
        it against our own previously stored BuildInfo, but the stored
        BuildInfo from another Node (typically one in a Repository)
        can be used instead.

        Note that we now *always* check every dependency.  We used to
        short-circuit the check by returning as soon as we detected
        any difference, but we now rely on checking every dependency
        to make sure that any necessary Node information (for example,
        the content signature of an #included .h file) is updated.

        The allowcache option was added for supporting the early
        release of the executor/builder structures, right after
        a File target was built.  When set to true, the return
        value of this changed method gets cached for File nodes.
        Like this, the executor isn't needed any longer for subsequent
        calls to changed().

        @see: FS.File.changed(), FS.File.release_target_info()
        """
        T = False
        if T: Trace('changed(%s [%s], %s)' % (self, classname(self), node))
        if node is None:
            node = self

        result = False

        bi = node.get_stored_info().binfo
        then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs
        children = self.children()

        diff = len(children) - len(then)
        if diff:
            # The old and new dependency lists are different lengths.
            # This always indicates that the Node must be rebuilt.
            # We also extend the old dependency list with enough None
            # entries to equal the new dependency list, for the benefit
            # of the loop below that updates node information.
            then.extend([None] * diff)
            if T: Trace(': old %s new %s' % (len(then), len(children)))
            result = True

        for child, prev_ni in zip(children, then):
            if _decider_map[child.changed_since_last_build](child, self, prev_ni, node):
                if T: Trace(f": '{child!s}' changed")
                result = True

        if self.has_builder():
            contents = self.get_executor().get_contents()
            newsig = hash_signature(contents)
            if bi.bactsig != newsig:
                if T: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig))
                result = True

        if not result:
            if T: Trace(': up to date')

        if T: Trace('\n')

        return result

    def is_up_to_date(self) -> bool:
        """Default check for whether the Node is current: unknown Node
        subtypes are always out of date, so they will always get built."""
        return False

    def children_are_up_to_date(self) -> bool:
        """Alternate check for whether the Node is current:  If all of
        our children were up-to-date, then this Node was up-to-date, too.

        The SCons.Node.Alias and SCons.Node.Python.Value subclasses
        rebind their current() method to this method."""
        # Allow the children to calculate their signatures.
        self.binfo = self.get_binfo()
        if self.always_build:
            return False
        state = 0
        for kid in self.children(False):
            s = kid.get_state()
            if s and (not state or s > state):
                state = s
        return (state == 0 or state == SCons.Node.up_to_date)
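
    # Illustrative sketch (not part of the module) of the state-priority logic
    # above: because the state constants are ordered, taking the maximum of
    # the children's states answers "is everything at most up_to_date?":
    #
    #     children states: [up_to_date, executed, up_to_date]  # max is executed   -> False
    #     children states: [no_state, up_to_date]              # max is up_to_date -> True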
    def is_literal(self) -> bool:
        """Always pass the string representation of a Node to
        the command interpreter literally."""
        return True

    def render_include_tree(self):
        """
        Return a text representation, suitable for displaying to the
        user, of the include tree for the sources of this node.
        """
        if self.is_derived():
            env = self.get_build_env()
            if env:
                for s in self.sources:
                    scanner = self.get_source_scanner(s)
                    if scanner:
                        path = self.get_build_scanner_path(scanner)
                    else:
                        path = None
                    def f(node: Node, env: Environment = env, scanner: ScannerBase = scanner, path=path):
                        return node.get_found_includes(env, scanner, path)
                    return render_tree(s, f, 1)
        else:
            return None

    def get_abspath(self) -> str:
        """
        Return an absolute path to the Node.  This will return simply
        str(Node) by default, but for Node types that have a concept of
        relative path, this might return something different.
        """
        return str(self)

    def for_signature(self) -> str:
        """
        Return a string representation of the Node that will always
        be the same for this particular Node, no matter what.  This
        is by contrast to the __str__() method, which might, for
        instance, return a relative path for a file Node.  The purpose
        of this method is to generate a value to be used in signature
        calculation for the command line used to build a target, and
        we use this method instead of str() to avoid unnecessary
        rebuilds.  This method does not need to return something that
        would actually work in a command line; it can return any kind of
        nonsense, so long as it does not change.
        """
        return str(self)

    def get_string(self, for_signature: bool) -> str:
        """This is a convenience function designed primarily to be
        used in command generators (i.e., CommandGeneratorActions or
        Environment variables that are callable), which are called
        with a for_signature argument that is nonzero if the command
        generator is being called to generate a signature for the
        command line, which determines if we should rebuild or not.

        Such command generators should use this method in preference
        to str(Node) when converting a Node to a string, passing
        in the for_signature parameter, such that we will call
        Node.for_signature() or str(Node) properly, depending on whether
        we are calculating a signature or actually constructing a
        command line."""
        if for_signature:
            return self.for_signature()
        return str(self)

    def get_subst_proxy(self):
        """
        This method is expected to return an object that will function
        exactly like this Node, except that it implements any additional
        special features that we would like to be in effect for
        Environment variable substitution.  The principal use is that
        some Nodes would like to implement a __getattr__() method,
        but putting that in the Node type itself has a tendency to kill
        performance.  We instead put it in a proxy and return it from
        this method.  It is legal for this method to return self
        if no new functionality is needed for Environment substitution.
        """
        return self

    def explain(self):
        if not self.exists():
            return "building `%s' because it doesn't exist\n" % self

        if self.always_build:
            return "rebuilding `%s' because AlwaysBuild() is specified\n" % self

        old = self.get_stored_info()
        if old is None:
            return None

        old = old.binfo
        old.prepare_dependencies()

        try:
            old_bkids = old.bsources + old.bdepends + old.bimplicit
            old_bkidsigs = old.bsourcesigs + old.bdependsigs + old.bimplicitsigs
        except AttributeError:
            return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self

        new = self.get_binfo()

        new_bkids = new.bsources + new.bdepends + new.bimplicit
        new_bkidsigs = new.bsourcesigs + new.bdependsigs + new.bimplicitsigs

        osig = dict(list(zip(old_bkids, old_bkidsigs)))
        nsig = dict(list(zip(new_bkids, new_bkidsigs)))

        # The sources and dependencies we'll want to report are all stored
        # as relative paths to this target's directory, but we want to
        # report them relative to the top-level SConstruct directory,
        # so we only print them after running them through this lambda
        # to turn them into the right relative Node and then return
        # its string.
        def stringify(s, E=self.dir.Entry):
            if hasattr(s, 'dir'):
                return str(E(s))
            return str(s)

        lines = []

        removed = [x for x in old_bkids if x not in new_bkids]
        if removed:
            removed = [stringify(r) for r in removed]
            fmt = "`%s' is no longer a dependency\n"
            lines.extend([fmt % s for s in removed])

        for k in new_bkids:
            if k not in old_bkids:
                lines.append("`%s' is a new dependency\n" % stringify(k))
            else:
                changed = _decider_map[k.changed_since_last_build](k, self, osig[k])

                if changed:
                    lines.append("`%s' changed\n" % stringify(k))

        if len(lines) == 0 and old_bkids != new_bkids:
            lines.append("the dependency order changed:\n")
            lines.append("->Sources\n")
            for (o, n) in zip_longest(old.bsources, new.bsources, fillvalue=None):
                lines.append("Old:%s\tNew:%s\n" % (o, n))
            lines.append("->Depends\n")
            for (o, n) in zip_longest(old.bdepends, new.bdepends, fillvalue=None):
                lines.append("Old:%s\tNew:%s\n" % (o, n))
            lines.append("->Implicit\n")
            for (o, n) in zip_longest(old.bimplicit, new.bimplicit, fillvalue=None):
                lines.append("Old:%s\tNew:%s\n" % (o, n))

        if len(lines) == 0:
            def fmt_with_title(title, strlines):
                lines = strlines.split('\n')
                sep = '\n' + ' ' * (15 + len(title))
                return ' ' * 15 + title + sep.join(lines) + '\n'
            if old.bactsig != new.bactsig:
                if old.bact == new.bact:
                    lines.append("the contents of the build action changed\n" +
                                 fmt_with_title('action: ', new.bact))
                    # lines.append("the contents of the build action changed [%s] [%s]\n" % (old.bactsig, new.bactsig) +
                    #              fmt_with_title('action: ', new.bact))
                else:
                    lines.append("the build action changed:\n" +
                                 fmt_with_title('old: ', old.bact) +
                                 fmt_with_title('new: ', new.bact))

        if len(lines) == 0:
            return "rebuilding `%s' for unknown reasons\n" % self

        preamble = "rebuilding `%s' because" % self
        if len(lines) == 1:
            return "%s %s" % (preamble, lines[0])
        else:
            lines = ["%s:\n" % preamble] + lines
            return (' ' * 11).join(lines)
class NodeList(collections.UserList):
    def __str__(self) -> str:
        return str(list(map(str, self.data)))
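
# Illustrative usage (hypothetical nodes, not part of the module): NodeList
# mostly exists so that printing a list of Nodes shows their names rather
# than their reprs:
#
#     nl = NodeList([node_a, node_b])
#     str(nl)   # -> "['node_a', 'node_b']" (whatever str() of each node is)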
def get_children(node: Node, parent: Node | None) -> list[Node]: return node.children()
def ignore_cycle(node: Node, stack: list[Node]) -> None: pass
def do_nothing(node: Node, parent: Node | None) -> None: pass
class Walker:
    """An iterator for walking a Node tree.

    This is depth-first, children are visited before the parent.
    The Walker object can be initialized with any node, and
    returns the next node on the descent with each get_next() call.
    'kids_func' is an optional function that will be called to
    get the children of a node instead of calling 'children'.
    'cycle_func' is an optional function that will be called
    when a cycle is detected.

    This class does not get caught in node cycles caused, for example,
    by C header file include loops.
    """
    def __init__(
        self,
        node: Node,
        kids_func: Callable[[Node, Node | None], list[Node]] = get_children,
        cycle_func: Callable[[Node, list[Node]], None] = ignore_cycle,
        eval_func: Callable[[Node, Node | None], None] = do_nothing,
    ) -> None:
        self.kids_func = kids_func
        self.cycle_func = cycle_func
        self.eval_func = eval_func
        node.wkids = copy.copy(kids_func(node, None))
        self.stack = [node]
        self.history: dict[Node, Any | None] = {}  # used to efficiently detect and avoid cycles
        self.history[node] = None

    def get_next(self):
        """Return the next node for this walk of the tree.

        This function is intentionally iterative, not recursive,
        to sidestep any issues of stack size limitations.
        """
        while self.stack:
            if self.stack[-1].wkids:
                node = self.stack[-1].wkids.pop(0)
                if not self.stack[-1].wkids:
                    self.stack[-1].wkids = None
                if node in self.history:
                    self.cycle_func(node, self.stack)
                else:
                    node.wkids = copy.copy(self.kids_func(node, self.stack[-1]))
                    self.stack.append(node)
                    self.history[node] = None
            else:
                node = self.stack.pop()
                del self.history[node]
                if node:
                    if self.stack:
                        parent = self.stack[-1]
                    else:
                        parent = None
                    self.eval_func(node, parent)
                return node
        return None

    def is_done(self) -> bool:
        return not self.stack
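
# Illustrative usage (hypothetical root node, not part of the module): a
# minimal bottom-up walk over a dependency tree, the same pattern debug
# traversals of the node graph can use:
#
#     walker = Walker(root_node)
#     node = walker.get_next()
#     while node is not None:
#         print(node)              # children are visited before their parents
#         node = walker.get_next()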
arg2nodes_lookups = []

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: