First pass at 'spew' debug mode #228

Open · wants to merge 3 commits into master

170 changes: 111 additions & 59 deletions src/webassets/bundle.py
@@ -54,6 +54,8 @@ def __init__(self, *contents, **options):
         self.depends = options.pop('depends', [])
         self.version = options.pop('version', [])
         self.extra = options.pop('extra', {})
+        self.spew_output = options.pop('spew_output', None)
+        self.spew_ext = options.pop('spew_ext', None)
         if options:
             raise TypeError("got unexpected keyword argument '%s'" %
                             options.keys()[0])
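
For context, the two options added here are plain keyword arguments on `Bundle`. A minimal sketch of how a bundle might be declared with them — the option names come from this diff; the file names, filter, and paths are made up for illustration:

```python
from webassets import Bundle

# Hypothetical bundle using the new options from this PR:
#   spew_output - directory the per-source-file build results are written to
#   spew_ext    - extension given to each generated file
js = Bundle('coffee/app.coffee', 'coffee/util.coffee',
            filters='coffeescript',
            output='gen/app.js',
            spew_output='gen/spew',
            spew_ext='.js')
```
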
@@ -379,11 +381,11 @@ def _merge_and_apply(self, env, output, force, parent_debug=None,
         for item, cnt in resolved_contents:
             if isinstance(cnt, Bundle):
                 # Recursively process nested bundles.
-                hunk = cnt._merge_and_apply(
+                subhunks = cnt._merge_and_apply(
                     env, output, force, current_debug_level,
                     filters_to_pass_down, disable_cache=disable_cache)
-                if hunk is not None:
-                    hunks.append((hunk, {}))
+                if subhunks is not None:
+                    hunks.extend(subhunks)

             else:
                 # Give a filter the chance to open his file.
@@ -428,29 +430,54 @@ def _merge_and_apply(self, env, output, force, parent_debug=None,
         if len(hunks) == 0:
             return None

-        # Merge the individual files together. There is an optional hook for
-        # a filter here, by implementing a concat() method.
-        try:
-            final = filtertool.apply_func(filters_to_run, 'concat', [hunks])
-        except MoreThanOneFilterError, e:
-            raise BuildError(e)
-        except NoFilters:
-            final = merge([h for h, _ in hunks])
-        except IOError, e:
-            # IOErrors can be raised here if hunks are loaded for the
-            # first time. TODO: IOErrors can also be raised when
-            # a file is read during the filter-apply phase, but we don't
-            # convert it to a BuildError there...
-            raise BuildError(e)
+        should_spew = current_debug_level == 'spew'
+
+        if not should_spew:
+            # Merge the individual files together. There is an optional hook for
+            # a filter here, by implementing a concat() method.
+            try:
+                final = filtertool.apply_func(
+                    filters_to_run, 'concat', [hunks])
+            except MoreThanOneFilterError, e:
+                raise BuildError(e)
+            except NoFilters:
+                final = merge([h for h, _ in hunks])
+            except IOError, e:
+                # IOErrors can be raised here if hunks are loaded for the
+                # first time. TODO: IOErrors can also be raised when
+                # a file is read during the filter-apply phase, but we don't
+                # convert it to a BuildError there...
+                raise BuildError(e)
+
+            hunks = [(final, {})]

         # Apply output filters.
         # TODO: So far, all the situations where bundle dependencies are
         # used/useful, are based on input filters having those dependencies. Is
         # it even required to consider them here with respect to the cache? We
         # might be able to run this operation with the cache on (the FilterTool
         # being possibly configured with cache reads off).
-        return filtertool.apply(final, selected_filters, 'output')
+        return [(filtertool.apply(hunk, selected_filters, 'output'), item_data)
+                for hunk, item_data in hunks]
+
+    def _resolve_spew_output(self, env, source_path, spew_output=None,
+                             spew_ext=None):
+        spew_output = spew_output or self.spew_output
+        spew_ext = spew_ext or self.spew_ext
+
+        basename = None
+        if is_url(source_path):
+            parsed = urlparse.urlsplit(source_path)
+            basename = path.join(parsed.netloc, parsed.path[1:])
+        else:
+            basename = path.relpath(source_path, env.directory)
+
+        if spew_ext is not None:
+            basename, _ = path.splitext(basename)
+            basename += spew_ext
+
+        return path.join(spew_output, basename)

     def _build(self, env, extra_filters=[], force=None, output=None,
                disable_cache=None):
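
To make the path mapping in `_resolve_spew_output` concrete, here is a small standalone sketch of the same steps for a local (non-URL) source file; the directory layout and option values are invented for illustration:

```python
from os import path

# Assumed values for the sketch: env.directory = '/project/static',
# spew_output = 'gen/spew', spew_ext = '.css'.
env_directory = '/project/static'
source_path = '/project/static/scss/site.scss'

basename = path.relpath(source_path, env_directory)   # 'scss/site.scss'
basename, _ = path.splitext(basename)                 # strip '.scss'
basename += '.css'                                    # apply spew_ext
print(path.join('gen/spew', basename))                # gen/spew/scss/site.css
```
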
@@ -493,52 +520,66 @@ def _build(self, env, extra_filters=[], force=None, output=None,
             # We can simply return the existing output file
             return FileHunk(self.resolve_output(env, self.output))

-        hunk = self._merge_and_apply(
+        hunks = self._merge_and_apply(
             env, [self.output, self.resolve_output(env, version='?')],
             force, disable_cache=disable_cache, extra_filters=extra_filters)
-        if hunk is None:
+        if hunks is None:
             raise BuildError('Nothing to build for %s, is empty' % self)

-        if output:
-            # If we are given a stream, just write to it.
-            output.write(hunk.data())
-        else:
-            # If it doesn't exist yet, create the target directory.
-            output = path.join(env.directory, self.output)
-            output_dir = path.dirname(output)
-            if not path.exists(output_dir):
-                os.makedirs(output_dir)
-
-            version = None
-            if env.versions:
-                version = env.versions.determine_version(self, env, hunk)
-
-            if not has_placeholder(self.output):
-                hunk.save(self.resolve_output(env))
-            else:
-                if not env.versions:
-                    raise BuildError((
-                        'You have not set the "versions" option, but %s '
-                        'uses a version placeholder in the output target'
-                        % self))
-                output = self.resolve_output(env, version=version)
-                hunk.save(output)
-                self.version = version
-
-            if env.manifest:
-                env.manifest.remember(self, env, version)
-            if env.versions and version:
-                # Hook for the versioner (for example set the timestamp of
-                # the file) to the actual version.
-                env.versions.set_version(self, env, output, version)
+        debug_level = _effective_debug_level(env, self)
+
+        if debug_level == 'spew':
+            for hunk, item_data in hunks:
+                output = self._resolve_spew_output(
+                    env, item_data['source_path'])
+                output = env.resolver.resolve_output_to_path(output, self)
+                output_dir = path.dirname(output)
+                if not path.exists(output_dir):
+                    os.makedirs(output_dir)
+        else:
+            [(hunk, _)] = hunks
+            if output:
+                # If we are given a stream, just write to it.
+                output.write(hunk.data())
+            else:
+                # If it doesn't exist yet, create the target directory.
+                output = path.join(env.directory, self.output)
+                output_dir = path.dirname(output)
+                if not path.exists(output_dir):
+                    os.makedirs(output_dir)
+
+                version = None
+                if env.versions:
+                    version = env.versions.determine_version(self, env, hunk)
+
+                if not has_placeholder(self.output):
+                    hunk.save(self.resolve_output(env))
+                else:
+                    if not env.versions:
+                        raise BuildError((
+                            'You have not set the "versions" option, but %s '
+                            'uses a version placeholder in the output target'
+                            % self))
+                    output = self.resolve_output(env, version=version)
+                    hunk.save(output)
+                    self.version = version
+
+                if env.manifest:
+                    env.manifest.remember(self, env, version)
+                if env.versions and version:
+                    # Hook for the versioner (for example set the timestamp of
+                    # the file) to the actual version.
+                    env.versions.set_version(self, env, output, version)

         # The updater may need to know this bundle exists and how it
         # has been last built, in order to detect changes in the
         # bundle definition, like new source files.
         if env.updater:
             env.updater.build_done(self, env)

-        return hunk
+        return [hunk for hunk, _ in hunks]

     def build(self, env=None, force=None, output=None, disable_cache=None):
         """Build this bundle, meaning create the file given by the ``output``
@@ -558,7 +599,7 @@ def build(self, env=None, force=None, output=None, disable_cache=None):
         env = self._get_env(env)
         hunks = []
         for bundle, extra_filters in self.iterbuild(env):
-            hunks.append(bundle._build(
+            hunks.extend(bundle._build(
                 env, extra_filters, force=force, output=output,
                 disable_cache=disable_cache))
         return hunks
@@ -621,27 +662,38 @@ def _urls(self, env, extra_filters, *args, **kwargs):
         # are built as well of course, so at this point we leave the urls()
         # recursion and start a build() recursion.
         debug = _effective_debug_level(env, self, extra_filters)
-        if debug == 'merge':
-            supposed_to_merge = True
-        elif debug is True:
-            supposed_to_merge = False
-        elif debug is False:
-            supposed_to_merge = True
-        else:
-            raise BundleError('Invalid debug value: %s' % debug)

         # We will output a single url for this bundle unless a) the
         # configuration tells us to output the source urls
         # ("supposed_to_merge"), or b) this bundle isn't actually configured to
         # be built, that is, has no filters and no output target.
-        if supposed_to_merge and (self.filters or self.output):
+        if (debug == 'merge' or debug is False) and\
+                (self.filters or self.output):
             # With ``auto_build``, build the bundle to make sure the output is
             # up to date; otherwise, we just assume the file already exists.
             # (not wasting any IO ops)
             if env.auto_build:
                 self._build(env, extra_filters=extra_filters, force=False,
                             *args, **kwargs)
             return [self._make_output_url(env)]
+        elif debug == 'spew':
+            spew_output = kwargs.pop('spew_output', self.spew_output)
+            spew_ext = kwargs.pop('spew_ext', self.spew_ext)
+
+            if self.filters and self.spew_output:
+                self._build(env, extra_filters=extra_filters, force=False,
+                            *args, **kwargs)
+
+            urls = []
+            for org, cnt in self.resolve_contents(env):
+                if isinstance(cnt, Bundle):
+                    urls.extend(org.urls(env, *args, spew_output=spew_output,
+                                         spew_ext=spew_ext, **kwargs))
+                else:
+                    output = self._resolve_spew_output(env, cnt,
+                        spew_output=spew_output, spew_ext=spew_ext)
+                    urls.append(env.resolver.resolve_output_to_url(output))
+            return urls
         else:
             # We either have no files (nothing to build), or we are
             # in debug mode: Instead of building the bundle, we
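
If the rest of this PR wires 'spew' through `Environment.debug` the same way 'merge' is handled today, application-level usage would presumably look something like the sketch below. This is an assumption about configuration, not something shown in the diff; only the `spew_output`/`spew_ext` option names and the per-source-URL behavior of `_urls()` are confirmed above:

```python
from webassets import Bundle, Environment

env = Environment('./static', '/static')
env.debug = 'spew'   # assumed: new debug level, analogous to env.debug = 'merge'

css = Bundle('less/site.less', 'less/admin.less', filters='less',
             output='gen/site.css',
             spew_output='gen/spew', spew_ext='.css')
env.register('css_all', css)

# Under the 'spew' branch of _urls() above, this should yield one URL per
# source file, each pointing at its individually filtered output, rather
# than a single URL for a merged file.
print(css.urls(env=env))
```
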
@@ -756,7 +808,7 @@ def _effective_debug_level(env, bundle, extra_filters=None, default=None):
         # declare they should always run puts the bundle automatically in
         # merge mode.
         filters = merge_filters(bundle.filters, extra_filters)
-        level = 'merge' if select_filters(filters, True) else None
+        level = 'spew' if select_filters(filters, True) else None

     if level is not None:
         # The new level must be lower than the older one. We do not thrown an

7 changes: 6 additions & 1 deletion src/webassets/filter/less.py
@@ -68,6 +68,7 @@ class Less(ExternalTool):
     options = {
         'less': ('binary', 'LESS_BIN'),
         'run_in_debug': 'LESS_RUN_IN_DEBUG',
+        'extra_args': 'LESS_EXTRA_ARGS',
     }
     max_debug_level = None

@@ -79,5 +80,9 @@ def setup(self):

     def input(self, in_, out, source_path, **kw):
         # Set working directory to the source file so that includes are found
+        args = [self.less or 'lessc']
+        if self.extra_args:
+            args.extend(self.extra_args)
+        args.append('-')
         with working_directory(filename=source_path):
-            self.subprocess([self.less or 'lessc', '-'], out, in_)
+            self.subprocess(args, out, in_)
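
The new `extra_args` option maps to the `LESS_EXTRA_ARGS` setting and is appended to the `lessc` command line before the trailing `-`. A hedged sketch of both ways it might be set; the particular lessc flags are only examples:

```python
from webassets import Bundle, Environment
from webassets.filter.less import Less

env = Environment('./static', '/static')

# Via configuration (the setting name added in this diff):
env.config['LESS_EXTRA_ARGS'] = ['--compress']

# Or by instantiating the filter directly with the new option:
less = Less(extra_args=['--include-path=less/lib'])
css = Bundle('less/site.less', filters=less, output='gen/site.css')
env.register('css_all', css)
```
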
2 changes: 1 addition & 1 deletion src/webassets/utils.py
@@ -175,7 +175,7 @@ def __new__(mcs, name, bases, attrs):
 def cmp_debug_levels(level1, level2):
     """cmp() for debug levels, returns -1, 0 or +1 indicating which debug
     level is higher than the other one."""
-    level_ints = { False: 0, 'merge': 1, True: 2 }
+    level_ints = { False: 0, 'merge': 1, 'spew': 2, True: 3 }
     try:
         return cmp(level_ints[level1], level_ints[level2])
     except KeyError, e:
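
With the new entry, the ordering used by `cmp_debug_levels` becomes False < 'merge' < 'spew' < True. A quick sketch of what that implies, using the same cmp-style contract (negative means the first level is lower) but written without Python 2's `cmp()`:

```python
# Ordering implied by the updated level_ints table:
#   False (0) < 'merge' (1) < 'spew' (2) < True (3)
level_ints = {False: 0, 'merge': 1, 'spew': 2, True: 3}

def cmp_debug_levels(level1, level2):
    # Same contract as the helper above: -1, 0 or +1.
    a, b = level_ints[level1], level_ints[level2]
    return (a > b) - (a < b)

assert cmp_debug_levels('merge', 'spew') == -1   # 'spew' is a higher level
assert cmp_debug_levels('spew', True) == -1      # full debug is highest
assert cmp_debug_levels(False, False) == 0
```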