# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""The Node package for the SCons software construction utility.

This is, in many ways, the heart of SCons.

A Node is where we encapsulate all of the dependency information about
any thing that SCons can build, or about any thing which SCons can use
to build some other thing. The canonical "thing," of course, is a file,
but a Node can also represent something remote (like a web page) or
something completely abstract (like an Alias).

Each specific type of "thing" is specifically represented by a subclass
of the Node base class: Node.FS.File for files, Node.Alias for aliases,
etc. Dependency information is kept here in the base class, and
information specific to files/aliases/etc. is in the subclass. The
goal, if we've done this correctly, is that any type of "thing" should
be able to depend on any other type of "thing."
"""
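# A minimal illustration of the idea above, as it would appear in a user's
# SConstruct (hypothetical file names; not part of this module): any kind of
# Node can depend on any other kind, e.g. an Alias node depending on a File
# node, and a File node depending on another File node.
#
#   env = Environment()
#   out = env.Command('out.txt', 'in.txt', Copy('$TARGET', '$SOURCE'))
#   env.Depends(out, 'extra.cfg')   # File -> File dependency
#   env.Alias('build', out)         # Alias -> File dependency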
import collections
import copy
from itertools import chain, zip_longest

import SCons.Debug
import SCons.Errors
import SCons.Executor
import SCons.Memoize
import SCons.Warnings
from SCons.compat import NoSlotsPyPy
from SCons.Debug import logInstanceCreation, Trace
from SCons.Util import hash_signature, is_List, UniqueList, render_tree


def classname(obj):
    return str(obj.__class__).split('.')[-1]
# Set to false if we're doing a dry run. There's more than one of these
# little treasures scattered about the code base.
do_store_info = True
print_duplicate = 0

# Node states
#
# These are in "priority" order, so that the maximum value for any
# child/dependency of a node represents the state of that node if
# it has no builder of its own. The canonical example is a file
# system directory, which is only up to date if all of its children
# were up to date.
no_state = 0
pending = 1
executing = 2
up_to_date = 3
executed = 4
failed = 5

# controls whether implicit dependencies are cached:
implicit_cache = 0

# controls whether implicit dep changes are ignored:
implicit_deps_unchanged = 0

# controls whether the cached implicit deps are ignored:
implicit_deps_changed = 0
# A variable that can be set to an interface-specific function to be called
# to annotate a Node with information about its creation.
def do_nothing_node(node) -> None: pass

Annotate = do_nothing_node

# global set for recording all processed SConstruct/SConscript nodes
SConscriptNodes = set()

# Gets set to 'True' if we're running in interactive mode. Is
# currently used to release parts of a target's info during
# clean builds and update runs (see release_target_info).
interactive = False
def is_derived_none(node):
    raise NotImplementedError

def is_derived_node(node) -> bool:
    """
    Returns true if this node is derived (i.e. built).
    """
    return node.has_builder() or node.side_effect

_is_derived_map = {0 : is_derived_none,
                   1 : is_derived_node}
def exists_none(node) -> bool:
    raise NotImplementedError

def exists_always(node) -> bool:
    return True

def exists_base(node) -> bool:
    return node.stat() is not None

def exists_entry(node) -> bool:
    """Return if the Entry exists. Check the file system to see
    what we should turn into first. Assume a file if there's no
    directory."""
    node.disambiguate()
    return _exists_map[node._func_exists](node)
def exists_file(node) -> bool:
    # Duplicate from source path if we are set up to do this.
    if node.duplicate and not node.is_derived() and not node.linked:
        src = node.srcnode()
        if src is not node:
            # At this point, src is meant to be copied in a variant directory.
            src = src.rfile()
            if src.get_abspath() != node.get_abspath():
                if src.exists():
                    node.do_duplicate(src)
                    # Can't return 1 here because the duplication might
                    # not actually occur if the -n option is being used.
                else:
                    # The source file does not exist. Make sure no old
                    # copy remains in the variant directory.
                    if print_duplicate:
                        print("dup: no src for %s, unlinking old variant copy" % node)
                    if exists_base(node) or node.islink():
                        node.fs.unlink(node.get_internal_path())
                    # Return None explicitly because the Base.exists() call
                    # above will have cached its value if the file existed.
                    return None
    return exists_base(node)

_exists_map = {0 : exists_none,
               1 : exists_always,
               2 : exists_base,
               3 : exists_entry,
               4 : exists_file}
def rexists_none(node):
    raise NotImplementedError

def rexists_node(node):
    return node.exists()

def rexists_base(node):
    return node.rfile().exists()

_rexists_map = {0 : rexists_none,
                1 : rexists_node,
                2 : rexists_base}
def get_contents_none(node):
    raise NotImplementedError

def get_contents_entry(node):
    """Fetch the contents of the entry. Returns the exact binary
    contents of the file."""
    try:
        node = node.disambiguate(must_exist=1)
    except SCons.Errors.UserError:
        # There was nothing on disk with which to disambiguate
        # this entry. Leave it as an Entry, but return a null
        # string so calls to get_contents() in emitters and the
        # like (e.g. in qt.py) don't have to disambiguate by hand
        # or catch the exception.
        return ''
    else:
        return _get_contents_map[node._func_get_contents](node)
def get_contents_dir(node):
    """Return content signatures and names of all our children
    separated by new-lines. Ensure that the nodes are sorted."""
    contents = []
    for n in sorted(node.children(), key=lambda t: t.name):
        contents.append('%s %s\n' % (n.get_csig(), n.name))
    return ''.join(contents)
def get_contents_file(node):
    if not node.rexists():
        return b''
    fname = node.rfile().get_abspath()
    try:
        with open(fname, "rb") as fp:
            contents = fp.read()
    except EnvironmentError as e:
        if not e.filename:
            e.filename = fname
        raise
    return contents

_get_contents_map = {0 : get_contents_none,
                     1 : get_contents_entry,
                     2 : get_contents_dir,
                     3 : get_contents_file}
def target_from_source_none(node, prefix, suffix, splitext):
    raise NotImplementedError

def target_from_source_base(node, prefix, suffix, splitext):
    return node.dir.Entry(prefix + splitext(node.name)[0] + suffix)

_target_from_source_map = {0 : target_from_source_none,
                           1 : target_from_source_base}
#
# The new decider subsystem for Nodes
#
# We would set and overwrite the changed_since_last_build function
# before, but for being able to use slots (less memory!) we now have
# a dictionary of the different decider functions. Then in the Node
# subclasses we simply store the index to the decider that should be
# used by it.
#

# First, the single decider functions
#
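# Illustrative sketch of the index-dispatch pattern (names taken from this
# module; the surrounding variables are hypothetical): callers never store a
# function on the Node itself, only a small integer, and look the decider up
# at call time, e.g.
#
#   decider = _decider_map[child.changed_since_last_build]
#   if decider(child, target, prev_ni, repo_node):
#       ...  # the dependency changed; 'target' is out of date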
def changed_since_last_build_node(node, target, prev_ni, repo_node=None) -> bool:
    """
    Must be overridden in a specific subclass to return True if this
    Node (a dependency) has changed since the last time it was used
    to build the specified target. prev_ni is this Node's state (for
    example, its file timestamp, length, maybe content signature)
    as of the last time the target was built.

    Note that this method is called through the dependency, not the
    target, because a dependency Node must be able to use its own
    logic to decide if it changed. For example, File Nodes need to
    obey if we're configured to use timestamps, but Python Value Nodes
    never use timestamps and always use the content. If this method
    were called through the target, then each Node's implementation
    of this method would have to have more complicated logic to
    handle all the different Node types on which it might depend.
    """
    raise NotImplementedError
def changed_since_last_build_alias(node, target, prev_ni, repo_node=None) -> bool:
    cur_csig = node.get_csig()
    try:
        return cur_csig != prev_ni.csig
    except AttributeError:
        return True
def changed_since_last_build_entry(node, target, prev_ni, repo_node=None) -> bool:
    node.disambiguate()
    return _decider_map[node.changed_since_last_build](node, target, prev_ni, repo_node)
def changed_since_last_build_state_changed(node, target, prev_ni, repo_node=None) -> bool:
    return node.state != SCons.Node.up_to_date
def decide_source(node, target, prev_ni, repo_node=None) -> bool:
    return target.get_build_env().decide_source(node, target, prev_ni, repo_node)

def decide_target(node, target, prev_ni, repo_node=None) -> bool:
    return target.get_build_env().decide_target(node, target, prev_ni, repo_node)
def changed_since_last_build_python(node, target, prev_ni, repo_node=None) -> bool:
    cur_csig = node.get_csig()
    try:
        return cur_csig != prev_ni.csig
    except AttributeError:
        return True
# Now, the mapping from indices to decider functions

_decider_map = {0 : changed_since_last_build_node,
                1 : changed_since_last_build_alias,
                2 : changed_since_last_build_entry,
                3 : changed_since_last_build_state_changed,
                4 : decide_source,
                5 : decide_target,
                6 : changed_since_last_build_python}
#
# The new store_info subsystem for Nodes
#
# We would set and overwrite the store_info function
# before, but for being able to use slots (less memory!) we now have
# a dictionary of the different functions. Then in the Node
# subclasses we simply store the index to the info method that should be
# used by it.
#

# First, the single info functions
#
def store_info_pass(node) -> None:
    pass

def store_info_file(node) -> None:
    # Merge our build information into the already-stored entry.
    # This accommodates "chained builds" where a file that's a target
    # in one build (SConstruct file) is a source in a different build.
    # See test/chained-build.py for the use case.
    if do_store_info:
        node.dir.sconsign().store_info(node.name, node)


store_info_map = {0 : store_info_pass,
                  1 : store_info_file}
# Classes for signature info for Nodes.

class NodeInfoBase:
    """
    The generic base class for signature information for a Node.

    Node subclasses should subclass NodeInfoBase to provide their own
    logic for dealing with their own Node-specific signature information.
    """
    __slots__ = ('__weakref__',)
    current_version_id = 2
    def update(self, node) -> None:
        try:
            field_list = self.field_list
        except AttributeError:
            return
        for f in field_list:
            try:
                delattr(self, f)
            except AttributeError:
                pass
            try:
                func = getattr(node, 'get_' + f)
            except AttributeError:
                pass
            else:
                setattr(self, f, func())

    def convert(self, node, val) -> None:
        pass
    def merge(self, other) -> None:
        """
        Merge the fields of another object into this object. Already existing
        information is overwritten by the other instance's data.
        WARNING: If a '__dict__' slot is added, it should be updated instead of
        replaced.
        """
        state = other.__getstate__()
        self.__setstate__(state)
    def format(self, field_list=None, names: int=0):
        if field_list is None:
            try:
                field_list = self.field_list
            except AttributeError:
                field_list = list(getattr(self, '__dict__', {}).keys())
                for obj in type(self).mro():
                    for slot in getattr(obj, '__slots__', ()):
                        if slot not in ('__weakref__', '__dict__'):
                            field_list.append(slot)
                field_list.sort()
        fields = []
        for field in field_list:
            try:
                f = getattr(self, field)
            except AttributeError:
                f = None
            f = str(f)
            if names:
                f = field + ': ' + f
            fields.append(f)
        return fields
    def __getstate__(self):
        """
        Return all fields that shall be pickled. Walk the slots in the class
        hierarchy and add those to the state dictionary. If a '__dict__' slot is
        available, copy all entries to the dictionary. Also include the version
        id, which is fixed for all instances of a class.
        """
        state = getattr(self, '__dict__', {}).copy()
        for obj in type(self).mro():
            for name in getattr(obj, '__slots__', ()):
                if hasattr(self, name):
                    state[name] = getattr(self, name)

        state['_version_id'] = self.current_version_id
        try:
            del state['__weakref__']
        except KeyError:
            pass
        return state
    def __setstate__(self, state) -> None:
        """
        Restore the attributes from a pickled state. The version is discarded.
        """
        # TODO check or discard version
        del state['_version_id']

        for key, value in state.items():
            if key not in ('__weakref__',):
                setattr(self, key, value)

class BuildInfoBase:
    """
    The generic base class for build information for a Node.

    This is what gets stored in a .sconsign file for each target file.
    It contains a NodeInfo instance for this node (signature information
    that's specific to the type of Node) and direct attributes for the
    generic build stuff we have to track: sources, explicit dependencies,
    implicit dependencies, and action information.
    """
    __slots__ = ("bsourcesigs", "bdependsigs", "bimplicitsigs", "bactsig",
                 "bsources", "bdepends", "bact", "bimplicit", "__weakref__")
    current_version_id = 2
    def __init__(self) -> None:
        # Create an object attribute from the class attribute so it ends up
        # in the pickled data in the .sconsign file.
        self.bsourcesigs = []
        self.bdependsigs = []
        self.bimplicitsigs = []
        self.bactsig = None
    def merge(self, other) -> None:
        """
        Merge the fields of another object into this object. Already existing
        information is overwritten by the other instance's data.
        WARNING: If a '__dict__' slot is added, it should be updated instead of
        replaced.
        """
        state = other.__getstate__()
        self.__setstate__(state)
    def __getstate__(self):
        """
        Return all fields that shall be pickled. Walk the slots in the class
        hierarchy and add those to the state dictionary. If a '__dict__' slot is
        available, copy all entries to the dictionary. Also include the version
        id, which is fixed for all instances of a class.
        """
        state = getattr(self, '__dict__', {}).copy()
        for obj in type(self).mro():
            for name in getattr(obj, '__slots__', ()):
                if hasattr(self, name):
                    state[name] = getattr(self, name)

        state['_version_id'] = self.current_version_id
        try:
            del state['__weakref__']
        except KeyError:
            pass
        return state
    def __setstate__(self, state) -> None:
        """
        Restore the attributes from a pickled state.
        """
        # TODO check or discard version
        del state['_version_id']
        for key, value in state.items():
            if key not in ('__weakref__',):
                setattr(self, key, value)

class Node(object, metaclass=NoSlotsPyPy):
    """The base Node class, for entities that we know how to
    build, or use to build other Nodes.
    """

    __slots__ = ['sources',
                 'changed_since_last_build',
                 '_func_get_contents',
                 '_func_target_from_source']

    class Attrs:
        __slots__ = ('shared', '__dict__')
    def __init__(self) -> None:
        if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.Node')
        # Note that we no longer explicitly initialize a self.builder
        # attribute to None here. That's because the self.builder
        # attribute may be created on-the-fly later by a subclass (the
        # canonical example being a builder to fetch a file from a
        # source code system like CVS or Subversion).

        # Each list of children that we maintain is accompanied by a
        # dictionary used to look up quickly whether a node is already
        # present in the list. Empirical tests showed that it was
        # fastest to maintain them as side-by-side Node attributes in
        # this way, instead of wrapping up each list+dictionary pair in
        # a class. (Of course, we could always still do that in the
        # future if we had a good reason to...).
        self.sources = []       # source files used to build node
        self.sources_set = set()
        self._specific_sources = False
        self.depends = []       # explicit dependencies (from Depends)
        self.depends_set = set()
        self.ignore = []        # dependencies to ignore
        self.ignore_set = set()
        self.prerequisites = None
        self.implicit = None    # implicit (scanned) dependencies (None means not scanned yet)
        self.waiting_parents = set()
        self.waiting_s_e = set()
        self.ref_count = 0
        self.wkids = None       # Kids yet to walk, when it's an array

        self.env = None
        self.state = no_state
        self.precious = None
        self.pseudo = False
        self.noclean = 0
        self.nocache = 0
        self.cached = 0         # is this node pulled from cache?
        self.always_build = None
        self.includes = None
        self.attributes = self.Attrs()  # Generic place to stick information about the Node.
        self.side_effect = 0    # true iff this node is a side effect
        self.side_effects = []  # the side effects of building this target
        self.linked = 0         # is this node linked to the variant directory?
        self.changed_since_last_build = 0
        self.store_info = 0
        self._tags = None

        self._func_is_derived = 1
        self._func_exists = 1
        self._func_rexists = 1
        self._func_get_contents = 0
        self._func_target_from_source = 0

        self.clear_memoized_values()

        # Let the interface in which the build engine is embedded
        # annotate this Node with its own info (like a description of
        # what line in what file created the node, for example).
        Annotate(self)
    def disambiguate(self, must_exist=None):
        return self

    def get_suffix(self) -> str:
        return ''
    @SCons.Memoize.CountMethodCall
    def get_build_env(self):
        """Fetch the appropriate Environment to build this node."""
        try:
            return self._memo['get_build_env']
        except KeyError:
            pass
        result = self.get_executor().get_build_env()
        self._memo['get_build_env'] = result
        return result
    def get_build_scanner_path(self, scanner):
        """Fetch the appropriate scanner path for this node."""
        return self.get_executor().get_build_scanner_path(scanner)

    def set_executor(self, executor) -> None:
        """Set the action executor for this node."""
        self.executor = executor
    def get_executor(self, create: int=1):
        """Fetch the action executor for this node. Create one if
        there isn't already one, and requested to do so."""
        try:
            executor = self.executor
        except AttributeError:
            if not create:
                raise
            try:
                act = self.builder.action
            except AttributeError:
                executor = SCons.Executor.Null(targets=[self])
            else:
                executor = SCons.Executor.Executor(act,
                                                   self.env or self.builder.env,
                                                   [self.builder.overrides],
                                                   [self],
                                                   self.sources)
            self.executor = executor
        return executor
    def executor_cleanup(self) -> None:
        """Let the executor clean up any cached information."""
        try:
            executor = self.get_executor(create=None)
        except AttributeError:
            pass
        else:
            if executor is not None:
                executor.cleanup()

    def reset_executor(self) -> None:
        """Remove cached executor; forces recompute when needed."""
        try:
            delattr(self, 'executor')
        except AttributeError:
            pass
    def push_to_cache(self) -> None:
        """Try to push a node into a cache."""
        pass

    def retrieve_from_cache(self) -> bool:
        """Try to retrieve the node's content from a cache

        This method is called from multiple threads in a parallel build,
        so only do thread safe stuff here. Do thread unsafe stuff
        in built().

        Returns true if the node was successfully retrieved.
        """
        return False
    #
    # Taskmaster interface subsystem
    #

    def make_ready(self) -> None:
        """Get a Node ready for evaluation.

        This is called before the Taskmaster decides if the Node is
        up-to-date or not. Overriding this method allows for a Node
        subclass to be disambiguated if necessary, or for an implicit
        source builder to be attached.
        """
        pass
    def prepare(self) -> None:
        """Prepare for this Node to be built.

        This is called after the Taskmaster has decided that the Node
        is out-of-date and must be rebuilt, but before actually calling
        the method to build the Node.

        This default implementation checks that explicit or implicit
        dependencies either exist or are derived, and initializes the
        BuildInfo structure that will hold the information about how
        this node is, uh, built.

        (The existence of source files is checked separately by the
        Executor, which aggregates checks for all of the targets built
        by a specific action.)

        Overriding this method allows for a Node subclass to remove
        the underlying file from the file system. Note that subclass
        methods should call this base class method to get the child
        check and the BuildInfo structure.
        """
        if self.depends is not None:
            for d in self.depends:
                if d.missing():
                    msg = "Explicit dependency `%s' not found, needed by target `%s'."
                    raise SCons.Errors.StopError(msg % (d, self))
        if self.implicit is not None:
            for i in self.implicit:
                if i.missing():
                    msg = "Implicit dependency `%s' not found, needed by target `%s'."
                    raise SCons.Errors.StopError(msg % (i, self))
        self.binfo = self.get_binfo()
    def build(self, **kw):
        """Actually build the node.

        This is called by the Taskmaster after it's decided that the
        Node is out-of-date and must be rebuilt, and after the
        :meth:`prepare` method has gotten everything, uh, prepared.

        This method is called from multiple threads in a parallel build,
        so only do thread safe stuff here. Do thread unsafe stuff
        in built().
        """
        try:
            self.get_executor()(self, **kw)
        except SCons.Errors.BuildError as e:
            e.node = self
            raise
    def built(self) -> None:
        """Called just after this node is successfully built."""

        # Clear the implicit dependency caches of any Nodes
        # waiting for this Node to be built.
        for parent in self.waiting_parents:
            parent.implicit = None

            # Handle the issue where a builder emits more than one target and
            # the source file for the builder is generated.
            # In that case only the first target was getting its .implicit
            # cleared when the source file is built (second scan), leaving
            # only partial implicits from the scan before the source file is
            # generated (typically the compiler only); scanned files are then
            # appended. This is persisted to sconsign, and a rebuild causes
            # false rebuilds because the ordering of the implicit list then
            # changes to what it should have been in the first place.
            # This is at least the following bugs:
            # https://github.com/SCons/scons/issues/2811
            # https://jira.mongodb.org/browse/SERVER-33111
            try:
                for peer in parent.target_peers:
                    peer.implicit = None
            except AttributeError:
                pass

        self.clear()

        if self.pseudo:
            if self.exists():
                raise SCons.Errors.UserError("Pseudo target " + str(self) + " must not exist")
        else:
            if not self.exists() and do_store_info:
                SCons.Warnings.warn(SCons.Warnings.TargetNotBuiltWarning,
                                    "Cannot find target " + str(self) + " after building")
        self.ninfo.update(self)
    def visited(self) -> None:
        """Called just after this node has been visited (with or
        without a build)."""
        try:
            binfo = self.binfo
        except AttributeError:
            # Apparently this node doesn't need build info, so
            # don't bother calculating or storing it.
            pass
        else:
            self.ninfo.update(self)
            SCons.Node.store_info_map[self.store_info](self)
    def release_target_info(self) -> None:
        """Called just after this node has been marked
        up-to-date or was built completely.

        This is where we try to release as many target node infos
        as possible for clean builds and update runs, in order
        to minimize the overall memory consumption.

        By purging attributes that aren't needed any longer after
        a Node (=File) got built, we don't have to care that much how
        many KBytes a Node actually requires...as long as we free
        the memory shortly afterwards.

        @see: built() and File.release_target_info()
        """
        pass

    def add_to_waiting_s_e(self, node) -> None:
        self.waiting_s_e.add(node)
    def add_to_waiting_parents(self, node) -> int:
        """
        Returns the number of nodes added to our waiting parents list:
        1 if we add a unique waiting parent, 0 if not. (Note that the
        returned values are intended to be used to increment a reference
        count, so don't think you can "clean up" this function by using
        True and False instead...)
        """
        wp = self.waiting_parents
        if node in wp:
            return 0
        wp.add(node)
        return 1
    def postprocess(self) -> None:
        """Clean up anything we don't need to hang onto after we've
        been built."""
        self.executor_cleanup()
        self.waiting_parents = set()
    def clear(self) -> None:
        """Completely clear a Node of all its cached state (so that it
        can be re-evaluated by interfaces that do continuous integration
        builds).
        """
        # The del_binfo() call here isn't necessary for normal execution,
        # but is for interactive mode, where we might rebuild the same
        # target and need to start from scratch.
        self.del_binfo()
        self.clear_memoized_values()
        self.ninfo = self.new_ninfo()
        self.executor_cleanup()
        for attr in ['cachedir_csig', 'cachesig', 'contentsig']:
            try:
                delattr(self, attr)
            except AttributeError:
                pass
        self.cached = 0
        self.includes = None
    def clear_memoized_values(self) -> None:
        self._memo = {}

    def builder_set(self, builder) -> None:
        self.builder = builder
        try:
            del self.executor
        except AttributeError:
            pass
    def has_builder(self) -> bool:
        """Return whether this Node has a builder or not.

        In Boolean tests, this turns out to be a *lot* more efficient
        than simply examining the builder attribute directly ("if
        node.builder: ..."). When the builder attribute is examined
        directly, it ends up calling __getattr__ for both the __len__
        and __bool__ attributes on instances of our Builder Proxy
        class(es), generating a bazillion extra calls and slowing
        things down immensely.
        """
        try:
            b = self.builder
        except AttributeError:
            # There was no explicit builder for this Node, so initialize
            # the self.builder attribute to None now.
            b = self.builder = None
        return b is not None
    def set_explicit(self, is_explicit) -> None:
        self.is_explicit = is_explicit

    def has_explicit_builder(self) -> bool:
        """Return whether this Node has an explicit builder.

        This allows an internal Builder created by SCons to be marked
        non-explicit, so that it can be overridden by an explicit
        builder that the user supplies (the canonical example being
        builders to build object files from sources).
        """
        try:
            return self.is_explicit
        except AttributeError:
            self.is_explicit = False
            return self.is_explicit

    def get_builder(self, default_builder=None):
        """Return the set builder, or a specified default value"""
        try:
            return self.builder
        except AttributeError:
            return default_builder
    multiple_side_effect_has_builder = has_builder

    def is_derived(self) -> bool:
        """
        Returns true if this node is derived (i.e. built).

        This should return true only for nodes whose path should be in
        the variant directory when duplicate=0 and should contribute their build
        signatures when they are used as source files to other derived files. For
        example: sources with source builders are not derived in this sense,
        and hence should not return true.
        """
        return _is_derived_map[self._func_is_derived](self)
    def is_sconscript(self) -> bool:
        """ Returns true if this node is an sconscript """
        return self in SConscriptNodes

    def is_conftest(self) -> bool:
        """ Returns true if this node is a conftest node"""
        try:
            self.attributes.conftest_node
        except AttributeError:
            return False
        return True

    def check_attributes(self, name):
        """ Simple API to check if the node.attributes for name has been set"""
        return getattr(getattr(self, "attributes", None), name, None)
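    # Illustrative sketch (hypothetical usage): check_attributes() lets callers
    # probe optional, tool-defined attributes without guarding against a missing
    # 'attributes' object themselves, e.g.
    #
    #   if node.check_attributes('conftest_node'):
    #       ...  # skip nodes created by Configure() tests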
    def alter_targets(self):
        """Return a list of alternate targets for this Node.
        """
        return [], None

    def get_found_includes(self, env, scanner, path):
        """Return the scanned include lines (implicit dependencies)
        found in this node.

        The default is no implicit dependencies. We expect this method
        to be overridden by any subclass that can be scanned for
        implicit dependencies.
        """
        return []
    def get_implicit_deps(self, env, initial_scanner, path_func, kw = {}):
        """Return a list of implicit dependencies for this node.

        This method exists to handle recursive invocation of the scanner
        on the implicit dependencies returned by the scanner, if the
        scanner's recursive flag says that we should.
        """
        nodes = [self]
        seen = set(nodes)
        dependencies = []
        path_memo = {}

        root_node_scanner = self._get_scanner(env, initial_scanner, None, kw)

        while nodes:
            node = nodes.pop(0)

            scanner = node._get_scanner(env, initial_scanner, root_node_scanner, kw)
            if not scanner:
                continue

            try:
                path = path_memo[scanner]
            except KeyError:
                path = path_func(scanner)
                path_memo[scanner] = path

            included_deps = [x for x in node.get_found_includes(env, scanner, path) if x not in seen]
            if included_deps:
                dependencies.extend(included_deps)
                seen.update(included_deps)
                nodes.extend(scanner.recurse_nodes(included_deps))

        return dependencies
    def _get_scanner(self, env, initial_scanner, root_node_scanner, kw):
        if initial_scanner:
            # handle explicit scanner case
            scanner = initial_scanner.select(self)
        else:
            # handle implicit scanner case
            scanner = self.get_env_scanner(env, kw)
            if scanner:
                scanner = scanner.select(self)

        if not scanner:
            # no scanner could be found for the given node's scanner key;
            # thus, make an attempt at using a default.
            scanner = root_node_scanner

        return scanner
    def get_env_scanner(self, env, kw={}):
        return env.get_scanner(self.scanner_key())

    def get_target_scanner(self):
        return self.builder.target_scanner
    def get_source_scanner(self, node):
        """Fetch the source scanner for the specified node

        NOTE: "self" is the target being built, "node" is
        the source file for which we want to fetch the scanner.

        Implies self.has_builder() is true; again, expect to only be
        called from locations where this is already verified.

        This function may be called very often; it attempts to cache
        the scanner found to improve performance.
        """
        scanner = None
        try:
            scanner = self.builder.source_scanner
        except AttributeError:
            pass
        if not scanner:
            # The builder didn't have an explicit scanner, so go look up
            # a scanner from env['SCANNERS'] based on the node's scanner
            # key (usually the file extension).
            scanner = self.get_env_scanner(self.get_build_env())
        if scanner:
            scanner = scanner.select(node)
        return scanner
    def add_to_implicit(self, deps) -> None:
        if not hasattr(self, 'implicit') or self.implicit is None:
            self.implicit = []
            self.implicit_set = set()
            self._children_reset()
        self._add_child(self.implicit, self.implicit_set, deps)
    def scan(self) -> None:
        """Scan this node's dependents for implicit dependencies."""
        # Don't bother scanning non-derived files, because we don't
        # care what their dependencies are.
        # Don't scan again, if we already have scanned.
        if self.implicit is not None:
            return
        self.implicit = []
        self.implicit_set = set()
        self._children_reset()
        if not self.has_builder():
            return

        build_env = self.get_build_env()
        executor = self.get_executor()

        # Here's where we implement --implicit-cache.
        if implicit_cache and not implicit_deps_changed:
            implicit = self.get_stored_implicit()
            if implicit is not None:
                # The implicit dependencies returned from the stored
                # .sconsign entry have already been converted to Nodes for
                # us. (We used to run them through a source_factory
                # function here.)

                # Update all of the targets with them. This
                # essentially short-circuits an N*M scan of the
                # sources for each individual target, which is a hell
                # of a lot more efficient.
                for tgt in executor.get_all_targets():
                    tgt.add_to_implicit(implicit)

                if implicit_deps_unchanged or self.is_up_to_date():
                    return
                # one of this node's sources has changed,
                # so we must recalculate the implicit deps for all targets
                for tgt in executor.get_all_targets():
                    tgt.implicit = []
                    tgt.implicit_set = set()

        # Have the executor scan the sources.
        executor.scan_sources(self.builder.source_scanner)

        # If there's a target scanner, have the executor scan the target
        # node itself and associated targets that might be built.
        scanner = self.get_target_scanner()
        if scanner:
            executor.scan_targets(scanner)
    def scanner_key(self):
        return None

    def select_scanner(self, scanner):
        """Selects a scanner for this Node.

        This is a separate method so it can be overridden by Node
        subclasses (specifically, Node.FS.Dir) that *must* use their
        own Scanner and don't select one from the Scanner.Selector that's
        configured for the target.
        """
        return scanner.select(self)
    def env_set(self, env, safe: bool=False) -> None:
        if safe and self.env:
            return
        self.env = env
    #
    # SIGNATURE SUBSYSTEM
    #

    NodeInfo = NodeInfoBase
    BuildInfo = BuildInfoBase

    def new_ninfo(self):
        ninfo = self.NodeInfo()
        return ninfo

    def get_ninfo(self):
        if self.ninfo is not None:
            return self.ninfo
        self.ninfo = self.new_ninfo()
        return self.ninfo

    def new_binfo(self):
        binfo = self.BuildInfo()
        return binfo
    def get_binfo(self):
        """
        Fetch a node's build information.

        node - the node whose sources will be collected
        cache - alternate node to use for the signature cache
        returns - the build signature

        This no longer handles the recursive descent of the
        node's children's signatures. We expect that they're
        already built and updated by someone else, if that's
        what's wanted.
        """
        try:
            return self.binfo
        except AttributeError:
            pass

        binfo = self.new_binfo()
        self.binfo = binfo

        executor = self.get_executor()
        ignore_set = self.ignore_set

        if self.has_builder():
            binfo.bact = str(executor)
            binfo.bactsig = hash_signature(executor.get_contents())

        if self._specific_sources:
            sources = [s for s in self.sources if s not in ignore_set]
        else:
            sources = executor.get_unignored_sources(self, self.ignore)

        seen = set()
        binfo.bsources = [s for s in sources if s not in seen and not seen.add(s)]
        binfo.bsourcesigs = [s.get_ninfo() for s in binfo.bsources]

        binfo.bdepends = [d for d in self.depends if d not in ignore_set]
        binfo.bdependsigs = [d.get_ninfo() for d in self.depends]

        # Because self.implicit is initialized to None (and not empty list [])
        # we have to handle this case
        if not self.implicit:
            binfo.bimplicit = []
            binfo.bimplicitsigs = []
        else:
            binfo.bimplicit = [i for i in self.implicit if i not in ignore_set]
            binfo.bimplicitsigs = [i.get_ninfo() for i in binfo.bimplicit]

        return binfo
    def del_binfo(self) -> None:
        """Delete the build info from this node."""
        try:
            delattr(self, 'binfo')
        except AttributeError:
            pass

    def get_csig(self):
        try:
            return self.ninfo.csig
        except AttributeError:
            ninfo = self.get_ninfo()
            ninfo.csig = hash_signature(self.get_contents())
            return self.ninfo.csig
    def get_cachedir_csig(self):
        return self.get_csig()

    def get_stored_info(self):
        return None

    def get_stored_implicit(self):
        """Fetch the stored implicit dependencies"""
        return None
    def set_precious(self, precious: int = 1) -> None:
        """Set the Node's precious value."""
        self.precious = precious

    def set_pseudo(self, pseudo: bool = True) -> None:
        """Set the Node's pseudo value."""
        self.pseudo = pseudo

    def set_noclean(self, noclean: int = 1) -> None:
        """Set the Node's noclean value."""
        # Make sure noclean is an integer so the --debug=stree
        # output in Util.py can use it as an index.
        self.noclean = noclean and 1 or 0

    def set_nocache(self, nocache: int = 1) -> None:
        """Set the Node's nocache value."""
        # Make sure nocache is an integer so the --debug=stree
        # output in Util.py can use it as an index.
        self.nocache = nocache and 1 or 0

    def set_always_build(self, always_build: int = 1) -> None:
        """Set the Node's always_build value."""
        self.always_build = always_build
    def exists(self) -> bool:
        """Does this node exist?"""
        return _exists_map[self._func_exists](self)

    def rexists(self) -> bool:
        """Does this node exist locally or in a repository?"""
        # There are no repositories by default:
        return _rexists_map[self._func_rexists](self)

    def get_contents(self):
        """Fetch the contents of the entry."""
        return _get_contents_map[self._func_get_contents](self)

    def missing(self) -> bool:
        return not self.is_derived() and \
               not self.linked and \
               not self.rexists()

    def remove(self):
        """Remove this Node: no-op by default."""
        return None
    def add_dependency(self, depend):
        """Adds dependencies."""
        try:
            self._add_child(self.depends, self.depends_set, depend)
        except TypeError as e:
            e = e.args[0]
            if is_List(e):
                s = list(map(str, e))
            else:
                s = str(e)
            raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
    def add_prerequisite(self, prerequisite) -> None:
        """Adds prerequisites"""
        if self.prerequisites is None:
            self.prerequisites = UniqueList()
        self.prerequisites.extend(prerequisite)
        self._children_reset()
    def add_ignore(self, depend):
        """Adds dependencies to ignore."""
        try:
            self._add_child(self.ignore, self.ignore_set, depend)
        except TypeError as e:
            e = e.args[0]
            if is_List(e):
                s = list(map(str, e))
            else:
                s = str(e)
            raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
    def add_source(self, source):
        """Adds sources."""
        if self._specific_sources:
            return
        try:
            self._add_child(self.sources, self.sources_set, source)
        except TypeError as e:
            e = e.args[0]
            if is_List(e):
                s = list(map(str, e))
            else:
                s = str(e)
            raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
    def _add_child(self, collection, set, child) -> None:
        """Adds 'child' to 'collection', first checking 'set' to see if it's
        already present."""
        added = None
        for c in child:
            if c not in set:
                set.add(c)
                collection.append(c)
                added = 1
        if added:
            self._children_reset()
    def set_specific_source(self, source) -> None:
        self.add_source(source)
        self._specific_sources = True

    def add_wkid(self, wkid) -> None:
        """Add a node to the list of kids waiting to be evaluated"""
        if self.wkids is not None:
            self.wkids.append(wkid)
    def _children_reset(self) -> None:
        self.clear_memoized_values()
        # We need to let the Executor clear out any calculated
        # build info that it's cached so we can re-calculate it.
        self.executor_cleanup()

    @SCons.Memoize.CountMethodCall
    def _children_get(self):
        try:
            return self._memo['_children_get']
        except KeyError:
            pass

        # The return list may contain duplicate Nodes, especially in
        # source trees where there are a lot of repeated #includes
        # of a tangle of .h files. Profiling shows, however, that
        # eliminating the duplicates with a brute-force approach that
        # preserves the order (that is, something like:
        #
        #       u = []
        #       for n in node_list:
        #           if n not in u:
        #               u.append(n)
        #
        # takes more cycles than just letting the underlying methods
        # hand back cached values if a Node's information is requested
        # multiple times. (Other methods of removing duplicates, like
        # using dictionary keys, lose the order, and the only ordered
        # dictionary patterns I found all ended up using "not in"
        # internally anyway...)
        if self.ignore_set:
            iter = chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f])

            children = []
            for i in iter:
                if i not in self.ignore_set:
                    children.append(i)
        else:
            children = self.all_children(scan=0)

        self._memo['_children_get'] = children
        return children
    def all_children(self, scan: int=1):
        """Return a list of all the node's direct children."""
        if scan:
            self.scan()

        # The return list may contain duplicate Nodes, especially in
        # source trees where there are a lot of repeated #includes
        # of a tangle of .h files. Profiling shows, however, that
        # eliminating the duplicates with a brute-force approach that
        # preserves the order (see the comment in _children_get() above)
        # takes more cycles than just letting the underlying methods
        # hand back cached values if a Node's information is requested
        # multiple times. (Other methods of removing duplicates, like
        # using dictionary keys, lose the order, and the only ordered
        # dictionary patterns I found all ended up using "not in"
        # internally anyway...)
        return list(chain.from_iterable([_f for _f in [self.sources, self.depends, self.implicit] if _f]))

    def children(self, scan: int=1):
        """Return a list of the node's direct children, minus those
        that are ignored by this node."""
        if scan:
            self.scan()
        return self._children_get()
    def set_state(self, state) -> None:
        self.state = state

    def get_state(self):
        return self.state

    def get_env(self):
        env = self.env
        if not env:
            import SCons.Defaults
            env = SCons.Defaults.DefaultEnvironment()
        return env
    def Decider(self, function) -> None:
        foundkey = None
        for k, v in _decider_map.items():
            if v == function:
                foundkey = k
                break
        if not foundkey:
            foundkey = len(_decider_map)
            _decider_map[foundkey] = function
        self.changed_since_last_build = foundkey
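    # Illustrative sketch (hypothetical decider function): Decider() accepts any
    # callable with the same signature as the functions registered in
    # _decider_map above, and stores only the map index on this node:
    #
    #   def content_plus_timestamp(dependency, target, prev_ni, repo_node=None):
    #       return (dependency.get_csig() != prev_ni.csig
    #               or dependency.get_timestamp() != prev_ni.timestamp)
    #
    #   node.Decider(content_plus_timestamp)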
    def Tag(self, key, value) -> None:
        """ Add a user-defined tag. """
        if not self._tags:
            self._tags = {}
        self._tags[key] = value

    def GetTag(self, key):
        """ Return a user-defined tag. """
        if not self._tags:
            return None
        return self._tags.get(key, None)
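    # Illustrative sketch (hypothetical tag name): tags are arbitrary
    # user/tool-defined key-value pairs attached to a node, e.g.
    #
    #   node.Tag('packaging_namespace', 'mytool')
    #   ns = node.GetTag('packaging_namespace')   # -> 'mytool', or None if unset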
    def changed(self, node=None, allowcache: bool=False):
        """
        Returns if the node is up-to-date with respect to the BuildInfo
        stored last time it was built. The default behavior is to compare
        it against our own previously stored BuildInfo, but the stored
        BuildInfo from another Node (typically one in a Repository)
        can be used instead.

        Note that we now *always* check every dependency. We used to
        short-circuit the check by returning as soon as we detected
        any difference, but we now rely on checking every dependency
        to make sure that any necessary Node information (for example,
        the content signature of an #included .h file) is updated.

        The allowcache option was added for supporting the early
        release of the executor/builder structures, right after
        a File target was built. When set to true, the return
        value of this changed method gets cached for File nodes.
        Like this, the executor isn't needed any longer for subsequent
        calls to changed().

        @see: FS.File.changed(), FS.File.release_target_info()
        """
        t = 0
        if t: Trace('changed(%s [%s], %s)' % (self, classname(self), node))
        if node is None:
            node = self

        result = False

        bi = node.get_stored_info().binfo
        then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs
        children = self.children()

        diff = len(children) - len(then)
        if diff:
            # The old and new dependency lists are different lengths.
            # This always indicates that the Node must be rebuilt.
            # We also extend the old dependency list with enough None
            # entries to equal the new dependency list, for the benefit
            # of the loop below that updates node information.
            then.extend([None] * diff)
            if t: Trace(': old %s new %s' % (len(then), len(children)))
            result = True

        for child, prev_ni in zip(children, then):
            if _decider_map[child.changed_since_last_build](child, self, prev_ni, node):
                if t: Trace(': %s changed' % child)
                result = True

        if self.has_builder():
            contents = self.get_executor().get_contents()
            newsig = hash_signature(contents)
            if bi.bactsig != newsig:
                if t: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig))
                result = True

        if not result:
            if t: Trace(': up to date')

        if t: Trace('\n')

        return result

    def is_up_to_date(self) -> bool:
        """Default check for whether the Node is current: unknown Node
        subtypes are always out of date, so they will always get built."""
        return False

    def children_are_up_to_date(self) -> bool:
        """Alternate check for whether the Node is current: If all of
        our children were up-to-date, then this Node was up-to-date, too.

        The SCons.Node.Alias and SCons.Node.Python.Value subclasses
        rebind their current() method to this method."""
        # Allow the children to calculate their signatures.
        self.binfo = self.get_binfo()
        if self.always_build:
            return False
        state = 0
        for kid in self.children(None):
            s = kid.get_state()
            if s and (not state or s > state):
                state = s
        return (state == 0 or state == SCons.Node.up_to_date)

    def is_literal(self) -> bool:
        """Always pass the string representation of a Node to
        the command interpreter literally."""
        return True
    def render_include_tree(self):
        """
        Return a text representation, suitable for displaying to the
        user, of the include tree for the sources of this node.
        """
        if self.is_derived():
            env = self.get_build_env()
            if env:
                for s in self.sources:
                    scanner = self.get_source_scanner(s)
                    if scanner:
                        path = self.get_build_scanner_path(scanner)
                    else:
                        path = None
                    def f(node, env=env, scanner=scanner, path=path):
                        return node.get_found_includes(env, scanner, path)
                    return render_tree(s, f, 1)
        else:
            return None
    def get_abspath(self):
        """
        Return an absolute path to the Node. This will return simply
        str(Node) by default, but for Node types that have a concept of
        relative path, this might return something different.
        """
        return str(self)

    def for_signature(self):
        """
        Return a string representation of the Node that will always
        be the same for this particular Node, no matter what. This
        is by contrast to the __str__() method, which might, for
        instance, return a relative path for a file Node. The purpose
        of this method is to generate a value to be used in signature
        calculation for the command line used to build a target, and
        we use this method instead of str() to avoid unnecessary
        rebuilds. This method does not need to return something that
        would actually work in a command line; it can return any kind of
        nonsense, so long as it does not change.
        """
        return str(self)
    def get_string(self, for_signature):
        """This is a convenience function designed primarily to be
        used in command generators (i.e., CommandGeneratorActions or
        Environment variables that are callable), which are called
        with a for_signature argument that is nonzero if the command
        generator is being called to generate a signature for the
        command line, which determines if we should rebuild or not.

        Such command generators should use this method in preference
        to str(Node) when converting a Node to a string, passing
        in the for_signature parameter, such that we will call
        Node.for_signature() or str(Node) properly, depending on whether
        we are calculating a signature or actually constructing a
        command line.
        """
        if for_signature:
            return self.for_signature()
        return str(self)
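    # Illustrative sketch (hypothetical generator function): a command generator
    # should convert nodes with get_string() so that signature calculation and
    # actual command construction each get the appropriate string form:
    #
    #   def generate_cmd(source, target, env, for_signature):
    #       srcs = ' '.join(s.get_string(for_signature) for s in source)
    #       return 'mytool -o %s %s' % (target[0].get_string(for_signature), srcs)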
    def get_subst_proxy(self):
        """
        This method is expected to return an object that will function
        exactly like this Node, except that it implements any additional
        special features that we would like to be in effect for
        Environment variable substitution. The principal use is that
        some Nodes would like to implement a __getattr__() method,
        but putting that in the Node type itself has a tendency to kill
        performance. We instead put it in a proxy and return it from
        this method. It is legal for this method to return self
        if no new functionality is needed for Environment substitution.
        """
        return self
    def explain(self):
        if not self.exists():
            return "building `%s' because it doesn't exist\n" % self

        if self.always_build:
            return "rebuilding `%s' because AlwaysBuild() is specified\n" % self

        old = self.get_stored_info()
        if old is None:
            return None

        old = old.binfo
        old.prepare_dependencies()

        try:
            old_bkids = old.bsources + old.bdepends + old.bimplicit
            old_bkidsigs = old.bsourcesigs + old.bdependsigs + old.bimplicitsigs
        except AttributeError:
            return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self

        new = self.get_binfo()

        new_bkids = new.bsources + new.bdepends + new.bimplicit
        new_bkidsigs = new.bsourcesigs + new.bdependsigs + new.bimplicitsigs

        osig = dict(list(zip(old_bkids, old_bkidsigs)))
        nsig = dict(list(zip(new_bkids, new_bkidsigs)))

        # The sources and dependencies we'll want to report are all stored
        # as relative paths to this target's directory, but we want to
        # report them relative to the top-level SConstruct directory,
        # so we only print them after running them through this lambda
        # to turn them into the right relative Node and then return
        # its string.
        def stringify( s, E=self.dir.Entry):
            if hasattr( s, 'dir' ) :
                return str(E(s))
            return str(s)

        lines = []

        removed = [x for x in old_bkids if x not in new_bkids]
        if removed:
            removed = [stringify(r) for r in removed]
            fmt = "`%s' is no longer a dependency\n"
            lines.extend([fmt % s for s in removed])

        for k in new_bkids:
            if k not in old_bkids:
                lines.append("`%s' is a new dependency\n" % stringify(k))
            else:
                changed = _decider_map[k.changed_since_last_build](k, self, osig[k])
                if changed:
                    lines.append("`%s' changed\n" % stringify(k))

        if len(lines) == 0 and old_bkids != new_bkids:
            lines.append("the dependency order changed:\n")
            lines.append("->Sources\n")
            for (o,n) in zip_longest(old.bsources, new.bsources, fillvalue=None):
                lines.append("Old:%s\tNew:%s\n"%(o,n))
            lines.append("->Depends\n")
            for (o,n) in zip_longest(old.bdepends, new.bdepends, fillvalue=None):
                lines.append("Old:%s\tNew:%s\n"%(o,n))
            lines.append("->Implicit\n")
            for (o,n) in zip_longest(old.bimplicit, new.bimplicit, fillvalue=None):
                lines.append("Old:%s\tNew:%s\n"%(o,n))

        if len(lines) == 0:
            def fmt_with_title(title, strlines):
                lines = strlines.split('\n')
                sep = '\n' + ' '*(15 + len(title))
                return ' '*15 + title + sep.join(lines) + '\n'
            if old.bactsig != new.bactsig:
                if old.bact == new.bact:
                    lines.append("the contents of the build action changed\n" +
                                 fmt_with_title('action: ', new.bact))

                    # lines.append("the contents of the build action changed [%s] [%s]\n"%(old.bactsig,new.bactsig) +
                    #              fmt_with_title('action: ', new.bact))
                else:
                    lines.append("the build action changed:\n" +
                                 fmt_with_title('old: ', old.bact) +
                                 fmt_with_title('new: ', new.bact))

        if len(lines) == 0:
            return "rebuilding `%s' for unknown reasons\n" % self

        preamble = "rebuilding `%s' because" % self
        if len(lines) == 1:
            return "%s %s" % (preamble, lines[0])
        else:
            lines = ["%s:\n" % preamble] + lines
            return ( ' '*11).join(lines)

class NodeList(collections.UserList):
    def __str__(self) -> str:
        return str(list(map(str, self.data)))
def get_children(node, parent): return node.children()
def ignore_cycle(node, stack) -> None: pass
def do_nothing(node, parent) -> None: pass
class Walker:
    """An iterator for walking a Node tree.

    This is depth-first, children are visited before the parent.
    The Walker object can be initialized with any node, and
    returns the next node on the descent with each get_next() call.
    'kids_func' is an optional function that will be called to
    get the children of a node instead of calling 'children'.
    'cycle_func' is an optional function that will be called
    when a cycle is detected.

    This class does not get caught in node cycles caused, for example,
    by C header file include loops.
    """
    def __init__(self, node, kids_func=get_children,
                             cycle_func=ignore_cycle,
                             eval_func=do_nothing) -> None:
        self.kids_func = kids_func
        self.cycle_func = cycle_func
        self.eval_func = eval_func
        node.wkids = copy.copy(kids_func(node, None))
        self.stack = [node]
        self.history = {}  # used to efficiently detect and avoid cycles
        self.history[node] = None
    def get_next(self):
        """Return the next node for this walk of the tree.

        This function is intentionally iterative, not recursive,
        to sidestep any issues of stack size limitations.
        """
        while self.stack:
            if self.stack[-1].wkids:
                node = self.stack[-1].wkids.pop(0)
                if not self.stack[-1].wkids:
                    self.stack[-1].wkids = None
                if node in self.history:
                    self.cycle_func(node, self.stack)
                else:
                    node.wkids = copy.copy(self.kids_func(node, self.stack[-1]))
                    self.stack.append(node)
                    self.history[node] = None
            else:
                node = self.stack.pop()
                del self.history[node]
                if node:
                    if self.stack:
                        parent = self.stack[-1]
                    else:
                        parent = None
                    self.eval_func(node, parent)
                return node
        return None
    def is_done(self) -> bool:
        return not self.stack
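# Illustrative sketch (hypothetical 'root' node): walk a dependency tree
# depth-first, children before parents, using the Walker defined above.
#
#   walker = Walker(root)
#   node = walker.get_next()
#   while node:
#       ...  # process 'node'; every child is seen before its parent
#       node = walker.get_next()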
arg2nodes_lookups = []
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: