Commit

Merge branch 'master' into rawfile-convert
bdbaddog authored Feb 14, 2024
2 parents 180c601 + b8fffb3 commit 3e60ee1
Showing 11 changed files with 186 additions and 96 deletions.
8 changes: 8 additions & 0 deletions CHANGES.txt
@@ -68,6 +68,9 @@ RELEASE VERSION/DATE TO BE FILLED IN LATER
- CacheDir writes no longer happen within the taskmaster critical section,
and therefore can run in parallel with both other CacheDir writes and the
taskmaster DAG walk.
- The NewParallel scheduler now only adds threads as new work requiring execution
is discovered, up to the limit set by -j. This should reduce resource utilization
when the achievable parallelism in the DAG is less than the -j limit.

From Mats Wichmann:
- Add support for Python 3.13 (as of alpha 2). So far only affects
@@ -83,6 +86,11 @@ RELEASE VERSION/DATE TO BE FILLED IN LATER
- Improve handling of file data that SCons itself processes - try
harder to decode non-UTF-8 text. SCons.Util.to_Text now exists
to convert a byte stream, such as "raw" file data. Fixes #3569, #4462.
The Pseudo manpage entry was updated to provide more clarity.
- The internal routine which implements the PyPackageDir function
would fail with an exception if called with a module which is
not found. It will now return None. Updated manpage entry and
docstring.


RELEASE 4.6.0 - Sun, 19 Nov 2023 17:22:20 -0700
7 changes: 7 additions & 0 deletions RELEASE.txt
@@ -56,6 +56,9 @@ FIXES
but Pseudo() did not work; is now enabled.
- Improve handling of file data that SCons itself processes - as in
scanners - try harder to decode non-UTF-8 text.
- PyPackageDir no longer fails if passed a module name which cannot be found,
now returns None.


IMPROVEMENTS
------------
@@ -69,6 +72,9 @@ IMPROVEMENTS
(Larger -j values)
- CacheDir writes no longer happen within the taskmaster critical section, and therefore
can run in parallel with both other CacheDir writes and the taskmaster DAG walk.
- The NewParallel scheduler now only adds threads as new work requiring execution
is discovered, up to the limit set by -j. This should reduce resource utilization
when the achievable parallelism in the DAG is less than the -j limit.


PACKAGING
@@ -82,6 +88,7 @@ DOCUMENTATION
- Fixed the Scanner examples in the User Guide to be runnable and added
some more explanation. Clarified discussion of the scanner function in
the Scanner Objects section of the manpage.
- The manpage entry for Pseudo was clarified.

DEVELOPMENT
-----------
12 changes: 9 additions & 3 deletions SCons/Environment.py
@@ -2339,6 +2339,7 @@ def Local(self, *targets):
return ret

def Precious(self, *targets):
"""Mark *targets* as precious: do not delete before building."""
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
@@ -2347,6 +2348,7 @@ def Precious(self, *targets):
return tlist

def Pseudo(self, *targets):
"""Mark *targets* as pseudo: must not exist."""
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
@@ -2355,13 +2357,17 @@ def Pseudo(self, *targets):
return tlist

def Repository(self, *dirs, **kw) -> None:
"""Specify Repository directories to search."""
dirs = self.arg2nodes(list(dirs), self.fs.Dir)
self.fs.Repository(*dirs, **kw)

def Requires(self, target, prerequisite):
"""Specify that 'prerequisite' must be built before 'target',
(but 'target' does not actually depend on 'prerequisite'
and need not be rebuilt if it changes)."""
"""Specify that *prerequisite* must be built before *target*.
Creates an order-only relationship, not a full dependency.
*prerequisite* must exist before *target* can be built, but
a change to *prerequisite* does not trigger a rebuild of *target*.
"""
tlist = self.arg2nodes(target, self.fs.Entry)
plist = self.arg2nodes(prerequisite, self.fs.Entry)
for t in tlist:
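The expanded Requires docstring above describes an order-only relationship. The following is a minimal SConstruct sketch of that pattern, assuming a hypothetical make_version.py generator script and made-up file names:

# SConstruct sketch: order-only prerequisite, as described in the Requires docstring above.
env = Environment()
# 'version.h' must exist before 'app' is built, but a change to it
# does not by itself trigger a rebuild of 'app'.
version_header = env.Command("version.h", [], "python make_version.py > $TARGET")
prog = env.Program("app", ["main.c"])
env.Requires(prog, version_header)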
47 changes: 36 additions & 11 deletions SCons/Environment.xml
Expand Up @@ -2880,20 +2880,41 @@ and &f-link-env-Prepend;.
</arguments>
<summary>
<para>
This returns a Directory Node similar to Dir.
The python module / package is looked up and if located
the directory is returned for the location.
<parameter>modulename</parameter>
Is a named python package / module to
lookup the directory for it's location.
</para>
<para>
If
<parameter>modulename</parameter>
is a list, SCons returns a list of Dir nodes.
Finds the location of <parameter>modulename</parameter>,
which can be a string or a sequence of strings,
each representing the name of a &Python; module.
Construction variables are expanded in
<parameter>modulename</parameter>.
Returns a Directory Node (see &f-link-Dir;),
or a list of Directory Nodes if
<parameter>modulename</parameter> is a sequence.
<literal>None</literal> is returned for any module not found.
</para>

<para>
When using a Tool module that is installed as a
&Python; module, you need
to specify a <parameter>toolpath</parameter> argument to
&f-link-Tool;,
&f-link-Environment;
or &f-link-Clone;,
as tools outside the standard project locations
(<filename>site_scons/site_tools</filename>)
will not be found otherwise.
Using &f-PyPackageDir; allows this path to be
discovered at runtime instead of hardcoding the path.
</para>

<para>
Example:
</para>

<example_commands>
env = Environment(
tools=["default", "ExampleTool"],
toolpath=[PyPackageDir("example_tool")]
)
</example_commands>
</summary>
</scons_function>
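Because PyPackageDir now returns None for a module that cannot be located, a build script can guard the toolpath lookup. A minimal sketch, reusing the hypothetical example_tool/ExampleTool names from the example above and a made-up fallback directory:

# SConstruct sketch: handle PyPackageDir() returning None when the module is not installed.
tool_dir = PyPackageDir("example_tool")
if tool_dir is None:
    # Fall back to a copy of the tool shipped inside the project tree.
    tool_dir = Dir("tools/example_tool")
env = Environment(tools=["default", "ExampleTool"], toolpath=[tool_dir])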

@@ -2988,6 +3009,10 @@ but the target file(s) do not actually
depend on the prerequisites
and will not be rebuilt simply because
the prerequisite file(s) change.
<parameter>target</parameter> and
<parameter>prerequisite</parameter> may each
be a string or Node, or a list of strings or Nodes.
Returns a list of the affected target nodes.
</para>

<para>
31 changes: 17 additions & 14 deletions SCons/Node/FS.py
@@ -1295,7 +1295,7 @@ def get_root(self, drive):
self.Root[''] = root
return root

def _lookup(self, p, directory, fsclass, create: int=1):
def _lookup(self, p, directory, fsclass, create: bool = True):
"""
The generic entry point for Node lookup with user-supplied data.
@@ -1431,7 +1431,7 @@ def _lookup(self, p, directory, fsclass, create: int=1):

return root._lookup_abs(p, fsclass, create)

def Entry(self, name, directory = None, create: int = 1):
def Entry(self, name, directory = None, create: bool = True):
"""Look up or create a generic Entry node with the specified name.
If the name is a relative path (begins with ./, ../, or a file
name), then it is looked up relative to the supplied directory
@@ -1440,7 +1440,7 @@ def Entry(self, name, directory = None, create: int = 1):
"""
return self._lookup(name, directory, Entry, create)

def File(self, name, directory = None, create: int = 1):
def File(self, name, directory = None, create: bool = True):
"""Look up or create a File node with the specified name. If
the name is a relative path (begins with ./, ../, or a file name),
then it is looked up relative to the supplied directory node,
@@ -1487,21 +1487,24 @@ def Repository(self, *dirs) -> None:
d = self.Dir(d)
self.Top.addRepository(d)

def PyPackageDir(self, modulename):
r"""Locate the directory of a given python module name
def PyPackageDir(self, modulename) -> Optional[Dir]:
r"""Locate the directory of Python module *modulename*.
For example scons might resolve to
Windows: C:\Python27\Lib\site-packages\scons-2.5.1
Linux: /usr/lib/scons
For example 'SCons' might resolve to
Windows: C:\Python311\Lib\site-packages\SCons
Linux: /usr/lib64/python3.11/site-packages/SCons
This can be useful when we want to determine a toolpath based on a python module name"""
Can be used to determine a toolpath based on a Python module name.
dirpath = ''

# Python3 Code
This is the backend called by the public API function
:meth:`~Environment.Base.PyPackageDir`.
"""
modspec = importlib.util.find_spec(modulename)
dirpath = os.path.dirname(modspec.origin)
return self._lookup(dirpath, None, Dir, True)
if modspec:
origin = os.path.dirname(modspec.origin)
return self._lookup(origin, directory=None, fsclass=Dir, create=True)
else:
return None


def variant_dir_target_climb(self, orig, dir, tail):
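For reference, the None return in the new implementation above comes straight from the standard library: importlib.util.find_spec() returns None when a top-level module cannot be located, and the new code only performs the directory lookup when a spec is found. A small standalone illustration, using the same nonexistent module name as the unit test below:

# Illustration of the stdlib behavior PyPackageDir now relies on.
import importlib.util

spec = importlib.util.find_spec("garglemod")  # nonexistent module, as in FSTests below
print(spec)  # prints 'None'; PyPackageDir therefore returns None instead of raising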
17 changes: 17 additions & 0 deletions SCons/Node/FSTests.py
@@ -4046,6 +4046,23 @@ def test_root_lookup_equivalence(self) -> None:
os.chdir(save_cwd)


class PyPackageDir(unittest.TestCase):
def runTest(self) -> None:
"""Test calling the PyPackageDir() method.
We don't want to mock the positive case here - there's
testing for that in E2E test test/Dir/PyPackageDir.
We're only making sure we don't die in the negative case
(module not found) and instead return None.
"""
fs = SCons.Node.FS.FS('/')
try:
pkdir = fs.PyPackageDir("garglemod")
except AttributeError:
self.fail("non-existent module raised AttributeError")
self.assertIsNone(pkdir)


if __name__ == "__main__":
unittest.main()

4 changes: 2 additions & 2 deletions SCons/Node/__init__.py
@@ -1230,7 +1230,7 @@ def set_precious(self, precious: int = 1) -> None:
self.precious = precious

def set_pseudo(self, pseudo: bool = True) -> None:
"""Set the Node's precious value."""
"""Set the Node's pseudo value."""
self.pseudo = pseudo

def set_noclean(self, noclean: int = 1) -> None:
@@ -1250,7 +1250,7 @@ def set_always_build(self, always_build: int = 1) -> None:
self.always_build = always_build

def exists(self) -> bool:
"""Does this node exists?"""
"""Reports whether node exists."""
return _exists_map[self._func_exists](self)

def rexists(self):
41 changes: 24 additions & 17 deletions SCons/Script/Main.xml
@@ -725,13 +725,12 @@ Progress(['-\r', '\\\r', '|\r', '/\r'], interval=5)
</arguments>
<summary>
<para>
Marks each given
<varname>target</varname>
as precious so it is not deleted before it is rebuilt. Normally
&scons;
deletes a target before building it.
Multiple targets can be passed in to a single call to
&f-Precious;.
Marks <varname>target</varname> as precious so it is not
deleted before it is rebuilt.
Normally &SCons; deletes a target before building it.
Multiple targets can be passed in a single call,
and may be strings and/or nodes.
Returns a list of the affected target nodes.
</para>
</summary>
</scons_function>
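A minimal SConstruct sketch of the call described above, with a made-up library target:

# SConstruct sketch: do not delete the existing 'core' library before rebuilding it.
env = Environment()
lib = env.StaticLibrary("core", ["core.c"])
env.Precious(lib)  # returns the list of affected target nodes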
@@ -742,16 +741,24 @@
</arguments>
<summary>
<para>
This indicates that each given
<varname>target</varname>
should not be created by the build rule, and if the target is created,
an error will be generated. This is similar to the gnu make .PHONY
target. However, in the vast majority of cases, an
&f-Alias;
is more appropriate.

Multiple targets can be passed in to a single call to
&f-Pseudo;.
Marks <parameter>target</parameter> as a pseudo target,
not representing the production of any physical target file.
If any pseudo <parameter>target</parameter> does exist,
&SCons; will abort the build with an error.
Multiple targets can be passed in a single call,
and may be strings and/or Nodes.
Returns a list of the affected target nodes.
</para>

<para>
&f-Pseudo; may be useful in conjunction with a builder
call (such as &f-link-Command;) which does not create a physical target,
and where the behavior would be incorrect if the target accidentally existed.
This is similar in concept to the GNU <application>make</application>
<literal>.PHONY</literal> target.
&SCons; also provides a powerful target alias capability
(see &f-link-Alias;) which may offer more flexibility
in many situations when defining target names that are not directly built.
</para>
</summary>
</scons_function>
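A minimal sketch of the Command-plus-Pseudo pattern mentioned above; the target name, source, and copy command are made up for illustration:

# SConstruct sketch: 'deploy' names an action that never produces a file on disk.
env = Environment()
deploy = env.Command("deploy", ["app"], "scp app example.com:/srv/app")
env.Pseudo(deploy)  # if a file named 'deploy' does exist, SCons aborts with an error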
36 changes: 23 additions & 13 deletions SCons/Taskmaster/Job.py
@@ -474,7 +474,7 @@ def __exit__(self, *args):

def __init__(self, taskmaster, num, stack_size) -> None:
self.taskmaster = taskmaster
self.num_workers = num
self.max_workers = num
self.stack_size = stack_size
self.interrupted = InterruptState()
self.workers = []
@@ -484,7 +484,7 @@ def __init__(self, taskmaster, num, stack_size) -> None:
# also protects access to our state that gets updated
# concurrently. The `can_search_cv` is associated with
# this mutex.
self.tm_lock = (threading.Lock if self.num_workers > 1 else NewParallel.FakeLock)()
self.tm_lock = (threading.Lock if self.max_workers > 1 else NewParallel.FakeLock)()

# Guarded under `tm_lock`.
self.jobs = 0
@@ -493,11 +493,11 @@ def __init__(self, taskmaster, num, stack_size) -> None:
# The `can_search_cv` is used to manage a leader /
# follower pattern for access to the taskmaster, and to
# awaken from stalls.
self.can_search_cv = (threading.Condition if self.num_workers > 1 else NewParallel.FakeCondition)(self.tm_lock)
self.can_search_cv = (threading.Condition if self.max_workers > 1 else NewParallel.FakeCondition)(self.tm_lock)

# The queue of tasks that have completed execution. The
# next thread to obtain `tm_lock`` will retire them.
self.results_queue_lock = (threading.Lock if self.num_workers > 1 else NewParallel.FakeLock)()
self.results_queue_lock = (threading.Lock if self.max_workers > 1 else NewParallel.FakeLock)()
self.results_queue = []

if self.taskmaster.trace:
@@ -516,22 +516,27 @@ def trace_message(self, message) -> None:
method_name = sys._getframe(1).f_code.co_name + "():"
thread_id=threading.get_ident()
self.trace.debug('%s.%s [Thread:%s] %s' % (type(self).__name__, method_name, thread_id, message))
# print('%-15s %s' % (method_name, message))

def start(self) -> None:
if self.num_workers == 1:
if self.max_workers == 1:
self._work()
else:
self._start_workers()
for worker in self.workers:
worker.join()
self.workers = []
self._start_worker()
while len(self.workers) > 0:
self.workers[0].join()
self.workers.pop(0)
self.taskmaster.cleanup()

def _start_workers(self) -> None:
def _maybe_start_worker(self) -> None:
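# Start another worker thread only while running multi-threaded, while the
# -j ceiling (max_workers) has not been reached, and while the number of
# dispatched jobs is at least the number of workers already started.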
if self.max_workers > 1 and len(self.workers) < self.max_workers:
if self.jobs >= len(self.workers):
self._start_worker()

def _start_worker(self) -> None:
prev_size = self._adjust_stack_size()
for _ in range(self.num_workers):
self.workers.append(NewParallel.Worker(self))
if self.trace:
self.trace_message("Starting new worker thread")
self.workers.append(NewParallel.Worker(self))
self._restore_stack_size(prev_size)

def _adjust_stack_size(self):
@@ -680,6 +685,11 @@ def _work(self):
self.trace_message("Found task requiring execution")
self.state = NewParallel.State.READY
self.can_search_cv.notify()
# This thread will be busy taking care of
# `execute`ing this task. If we haven't
# reached the limit, spawn a new thread to
# turn the crank and find the next task.
self._maybe_start_worker()

else:
# We failed to find a task, so this thread