From a4fcb9a4264df595ad4c0d1dbaf6f82ecfa15077 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:02:42 -0700 Subject: [PATCH 01/23] Vendor webassets Per discussion in https://github.com/miracle2k/webassets/pull/553 --- pelican/plugins/webassets/vendor/__init__.py | 0 .../webassets/vendor/webassets/__init__.py | 6 + .../webassets/vendor/webassets/bundle.py | 927 ++++++++++++++++++ .../webassets/vendor/webassets/cache.py | 239 +++++ .../plugins/webassets/vendor/webassets/env.py | 847 ++++++++++++++++ .../webassets/vendor/webassets/exceptions.py | 32 + .../vendor/webassets/ext/__init__.py | 0 .../webassets/vendor/webassets/ext/jinja2.py | 255 +++++ .../vendor/webassets/filter/__init__.py | 737 ++++++++++++++ .../vendor/webassets/filter/autoprefixer.py | 85 ++ .../vendor/webassets/filter/babel.py | 77 ++ .../vendor/webassets/filter/cleancss.py | 49 + .../vendor/webassets/filter/clevercss.py | 24 + .../vendor/webassets/filter/closure.py | 75 ++ .../webassets/filter/closure_stylesheets.py | 50 + .../webassets/filter/closure_templates.py | 100 ++ .../vendor/webassets/filter/coffeescript.py | 62 ++ .../vendor/webassets/filter/compass.py | 255 +++++ .../vendor/webassets/filter/cssmin.py | 26 + .../vendor/webassets/filter/cssprefixer.py | 25 + .../webassets/filter/cssrewrite/__init__.py | 110 +++ .../webassets/filter/cssrewrite/base.py | 118 +++ .../webassets/filter/cssrewrite/urlpath.py | 269 +++++ .../vendor/webassets/filter/cssutils.py | 34 + .../vendor/webassets/filter/datauri.py | 71 ++ .../webassets/vendor/webassets/filter/dust.py | 57 ++ .../vendor/webassets/filter/handlebars.py | 75 ++ .../webassets/vendor/webassets/filter/jade.py | 130 +++ .../vendor/webassets/filter/jinja2.py | 42 + .../vendor/webassets/filter/jsmin.py | 44 + .../webassets/filter/jspacker/__init__.py | 24 + .../webassets/filter/jspacker/jspacker.py | 577 +++++++++++ .../webassets/vendor/webassets/filter/jst.py | 190 ++++ .../webassets/vendor/webassets/filter/less.py | 145 +++ .../vendor/webassets/filter/less_ruby.py | 84 ++ .../vendor/webassets/filter/libsass.py | 104 ++ .../vendor/webassets/filter/node_sass.py | 105 ++ .../vendor/webassets/filter/postcss.py | 58 ++ .../vendor/webassets/filter/pyscss.py | 156 +++ .../vendor/webassets/filter/rcssmin.py | 36 + .../vendor/webassets/filter/replace.py | 52 + .../vendor/webassets/filter/requirejs.py | 168 ++++ .../webassets/filter/rjsmin/__init__.py | 35 + .../vendor/webassets/filter/rjsmin/rjsmin.py | 543 ++++++++++ .../webassets/vendor/webassets/filter/sass.py | 166 ++++ .../vendor/webassets/filter/sass_ruby.py | 225 +++++ .../vendor/webassets/filter/slimit.py | 31 + .../vendor/webassets/filter/slimmer.py | 26 + .../vendor/webassets/filter/spritemapper.py | 125 +++ .../vendor/webassets/filter/stylus.py | 52 + .../vendor/webassets/filter/typescript.py | 60 ++ .../vendor/webassets/filter/uglifyjs.py | 32 + .../webassets/vendor/webassets/filter/yui.py | 54 + .../webassets/vendor/webassets/importlib.py | 38 + .../webassets/vendor/webassets/loaders.py | 338 +++++++ .../webassets/vendor/webassets/merge.py | 356 +++++++ .../webassets/vendor/webassets/py.typed | 0 .../webassets/vendor/webassets/script.py | 582 +++++++++++ .../plugins/webassets/vendor/webassets/six.py | 417 ++++++++ .../webassets/vendor/webassets/test.py | 154 +++ .../webassets/vendor/webassets/updater.py | 192 ++++ .../webassets/vendor/webassets/utils.py | 249 +++++ .../webassets/vendor/webassets/version.py | 323 ++++++ 63 files changed, 10518 insertions(+) create mode 100644 
pelican/plugins/webassets/vendor/__init__.py create mode 100644 pelican/plugins/webassets/vendor/webassets/__init__.py create mode 100644 pelican/plugins/webassets/vendor/webassets/bundle.py create mode 100644 pelican/plugins/webassets/vendor/webassets/cache.py create mode 100644 pelican/plugins/webassets/vendor/webassets/env.py create mode 100644 pelican/plugins/webassets/vendor/webassets/exceptions.py create mode 100644 pelican/plugins/webassets/vendor/webassets/ext/__init__.py create mode 100644 pelican/plugins/webassets/vendor/webassets/ext/jinja2.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/__init__.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/autoprefixer.py create mode 100755 pelican/plugins/webassets/vendor/webassets/filter/babel.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/cleancss.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/clevercss.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/closure.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/closure_stylesheets.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/closure_templates.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/coffeescript.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/compass.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/cssmin.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/cssprefixer.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/__init__.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/base.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/urlpath.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/cssutils.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/datauri.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/dust.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/handlebars.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/jade.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/jinja2.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/jsmin.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/jspacker/__init__.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/jspacker/jspacker.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/jst.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/less.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/less_ruby.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/libsass.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/node_sass.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/postcss.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/pyscss.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/rcssmin.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/replace.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/requirejs.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/rjsmin/__init__.py create mode 100755 pelican/plugins/webassets/vendor/webassets/filter/rjsmin/rjsmin.py create mode 100644 
pelican/plugins/webassets/vendor/webassets/filter/sass.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/sass_ruby.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/slimit.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/slimmer.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/spritemapper.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/stylus.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/typescript.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/uglifyjs.py create mode 100644 pelican/plugins/webassets/vendor/webassets/filter/yui.py create mode 100644 pelican/plugins/webassets/vendor/webassets/importlib.py create mode 100644 pelican/plugins/webassets/vendor/webassets/loaders.py create mode 100644 pelican/plugins/webassets/vendor/webassets/merge.py create mode 100644 pelican/plugins/webassets/vendor/webassets/py.typed create mode 100644 pelican/plugins/webassets/vendor/webassets/script.py create mode 100644 pelican/plugins/webassets/vendor/webassets/six.py create mode 100644 pelican/plugins/webassets/vendor/webassets/test.py create mode 100644 pelican/plugins/webassets/vendor/webassets/updater.py create mode 100644 pelican/plugins/webassets/vendor/webassets/utils.py create mode 100644 pelican/plugins/webassets/vendor/webassets/version.py diff --git a/pelican/plugins/webassets/vendor/__init__.py b/pelican/plugins/webassets/vendor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pelican/plugins/webassets/vendor/webassets/__init__.py b/pelican/plugins/webassets/vendor/webassets/__init__.py new file mode 100644 index 0000000..0433191 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/__init__.py @@ -0,0 +1,6 @@ +__version__ = "3.0.0a1" + + +# Make a couple frequently used things available right here. +from .bundle import Bundle +from .env import Environment diff --git a/pelican/plugins/webassets/vendor/webassets/bundle.py b/pelican/plugins/webassets/vendor/webassets/bundle.py new file mode 100644 index 0000000..bbb23d8 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/bundle.py @@ -0,0 +1,927 @@ +from contextlib import contextmanager +import os +from os import path +from webassets import six +from webassets.six.moves import map +from webassets.six.moves import zip + +from .filter import get_filter +from .merge import (FileHunk, UrlHunk, FilterTool, merge, merge_filters, + select_filters, MoreThanOneFilterError, NoFilters) +from .updater import SKIP_CACHE +from .exceptions import BundleError, BuildError +from .utils import cmp_debug_levels, hash_func +from .env import ConfigurationContext, DictConfigStorage, BaseEnvironment +from .utils import is_url, calculate_sri_on_file + + +__all__ = ('Bundle', 'get_all_bundle_files',) + + +def has_placeholder(s): + return '%(version)s' in s + + +class ContextWrapper(object): + """Implements a hierarchy-aware configuration context. + + Since each bundle can provide settings that augment the values of + the parent bundle, and ultimately the environment, as the bundle + hierarchy is processed, this class is used to provide an interface + that searches through the hierarchy of settings. It's what you get + when you are given a ``ctx`` value. 
+ """ + + def __init__(self, parent, overwrites=None): + self._parent, self._overwrites = parent, overwrites + + def __getitem__(self, key): + try: + if self._overwrites is None: + raise KeyError() + return self._overwrites.config[key] + except KeyError: + return self._parent.config.get(key) + + def __getattr__(self, item): + try: + return self.getattr(self._overwrites, item) + except (KeyError, AttributeError, EnvironmentError): + return self.getattr(self._parent, item) + + def getattr(self, object, item): + # Helper because Bundles are special in that the config attributes + # are in bundle.config (bundle.config.url vs env.url or ctx.url). + if isinstance(object, Bundle): + return getattr(object.config, item) + else: + return getattr(object, item) + + def get(self, key, default=None): + try: + return self.__getitem__(key) + except KeyError: + return default + + @property + def environment(self): + """Find the root environment context.""" + if isinstance(self._parent, BaseEnvironment): + return self._parent + return self._parent.environment + + +def wrap(parent, overwrites): + """Return a context object where the values from ``overwrites`` + augment the ``parent`` configuration. See :class:`ContextWrapper`. + """ + return ContextWrapper(parent, overwrites) + + +class BundleConfig(DictConfigStorage, ConfigurationContext): + """A configuration dict that also supports Environment-like attribute + access, i.e. ``config['resolver']`` and ``config.resolver``. + """ + def __init__(self, bundle): + DictConfigStorage.__init__(self, bundle) + ConfigurationContext.__init__(self, self) + + +class Bundle(object): + """A bundle is the unit webassets uses to organize groups of media files, + which filters to apply and where to store them. + + Bundles can be nested arbitrarily. + + A note on the connection between a bundle and an "environment" instance: + The bundle requires a environment that it belongs to. Without an + environment, it lacks information about how to behave, and cannot know + where relative paths are actually based. However, I don't want to make the + ``Bundle.__init__`` syntax more complicated than it already is by requiring + an Environment object to be passed. This would be a particular nuisance + when nested bundles are used. Further, nested bundles are never explicitly + connected to an Environment, and what's more, the same child bundle can be + used in multiple parent bundles. + + This is the reason why basically every method of the Bundle class takes an + ``env`` parameter - so a parent bundle can provide the environment for + child bundles that do not know it. 
+ """ + + def __init__(self, *contents, **options): + self._env = options.pop('env', None) + self.contents = contents + self.output = options.pop('output', None) + self.filters = options.pop('filters', None) + self.depends = options.pop('depends', []) + self.version = options.pop('version', []) + self.remove_duplicates = options.pop('remove_duplicates', True) + self.extra = options.pop('extra', {}) + self.merge = options.pop('merge', True) + + self._config = BundleConfig(self) + self._config.update(options.pop('config', {})) + if 'debug' in options: + debug = options.pop('debug') + if debug is not None: + self._config['debug'] = debug + + if options: + raise TypeError("got unexpected keyword argument '%s'" % + list(options.keys())[0]) + + def __repr__(self): + return "<%s output=%s, filters=%s, contents=%s>" % ( + self.__class__.__name__, + self.output, + self.filters, + self.contents, + ) + + @property + def config(self): + # This is a property so that user are not tempted to assign + # a custom dictionary which won't uphold our caseless semantics. + return self._config + + def _get_debug(self): + return self.config.get('debug', None) + def _set_debug(self, value): + self.config['debug'] = value + debug = property(_get_debug, _set_debug) + + def _get_filters(self): + return self._filters + def _set_filters(self, value): + """Filters may be specified in a variety of different ways, including + by giving their name; we need to make sure we resolve everything to an + actual filter instance. + """ + if value is None: + self._filters = () + return + + if isinstance(value, six.string_types): + # 333: Simplify w/o condition? + if six.PY3: + filters = map(str.strip, value.split(',')) + else: + filters = map(unicode.strip, unicode(value).split(',')) + elif isinstance(value, (list, tuple)): + filters = value + else: + filters = [value] + self._filters = [get_filter(f) for f in filters] + filters = property(_get_filters, _set_filters) + + def _get_contents(self): + return self._contents + def _set_contents(self, value): + self._contents = value + self._resolved_contents = None + contents = property(_get_contents, _set_contents) + + def _get_extra(self): + if not self._extra and not has_files(self): + # If this bundle has no extra values of it's own, and only + # wraps child bundles, use the extra values of those. + result = {} + for bundle in self.contents: + if bundle.extra is not None: + result.update(bundle.extra) + return result + else: + return self._extra + def _set_extra(self, value): + self._extra = value + extra = property(_get_extra, _set_extra, doc="""A custom user dict of + extra values attached to this bundle. Those will be available in + template tags, and can be used to attach things like a CSS + 'media' value.""") + + def resolve_contents(self, ctx=None, force=False): + """Return an actual list of source files. + + What the user specifies as the bundle contents cannot be + processed directly. There may be glob patterns of course. We + may need to search the load path. It's common for third party + extensions to provide support for referencing assets spread + across multiple directories. + + This passes everything through :class:`Environment.resolver`, + through which this process can be customized. + + At this point, we also validate source paths to complain about + missing files early. + + The return value is a list of 2-tuples ``(original_item, + abspath)``. In the case of urls and nested bundles both tuple + values are the same. 
+ + Set ``force`` to ignore any cache, and always re-resolve + glob patterns. + """ + if not ctx: + ctx = wrap(self.env, self) + + # TODO: We cache the values, which in theory is problematic, since + # due to changes in the env object, the result of the globbing may + # change. Not to mention that a different env object may be passed + # in. We should find a fix for this. + if getattr(self, '_resolved_contents', None) is None or force: + resolved = [] + for item in self.contents: + try: + result = ctx.resolver.resolve_source(ctx, item) + except IOError as e: + raise BundleError(e) + if not isinstance(result, list): + result = [result] + + # Exclude the output file. + # TODO: This will not work for nested bundle contents. If it + # doesn't work properly anyway, should be do it in the first + # place? If there are multiple versions, it will fail as well. + # TODO: There is also the question whether we can/should + # exclude glob duplicates. + if self.output: + try: + result.remove(self.resolve_output(ctx)) + except (ValueError, BundleError): + pass + + resolved.extend(map(lambda r: (item, r), result)) + + # Exclude duplicate files from the bundle. + # This will only keep the first occurrence of a file in the bundle. + if self.remove_duplicates: + resolved = self._filter_duplicates(resolved) + + self._resolved_contents = resolved + + return self._resolved_contents + + @staticmethod + def _filter_duplicates(resolved): + # Keep track of the resolved filenames that have been seen, and only + # add it the first time it is encountered. + seen_files = set() + result = [] + for item, r in resolved: + if r not in seen_files: + seen_files.add(r) + result.append((item, r)) + return result + + def _get_depends(self): + return self._depends + def _set_depends(self, value): + self._depends = [value] if isinstance(value, six.string_types) else value + self._resolved_depends = None + depends = property(_get_depends, _set_depends, doc= + """Allows you to define an additional set of files (glob syntax + is supported), which are considered when determining whether a + rebuild is required. + """) + + def resolve_depends(self, ctx): + # TODO: Caching is as problematic here as it is in resolve_contents(). + if not self.depends: + return [] + if getattr(self, '_resolved_depends', None) is None: + resolved = [] + for item in self.depends: + try: + result = ctx.resolver.resolve_source(ctx, item) + except IOError as e: + raise BundleError(e) + if not isinstance(result, list): + result = [result] + resolved.extend(result) + self._resolved_depends = resolved + return self._resolved_depends + + def get_version(self, ctx=None, refresh=False): + """Return the current version of the Bundle. + + If the version is not cached in memory, it will first look in the + manifest, then ask the versioner. + + ``refresh`` causes a value in memory to be ignored, and the version + to be looked up anew. + """ + if not ctx: + ctx = wrap(self.env, self) + if not self.version or refresh: + version = None + # First, try a manifest. This should be the fastest way. + if ctx.manifest: + version = ctx.manifest.query(self, ctx) + # Often the versioner is able to help. + if not version: + from .version import VersionIndeterminableError + if ctx.versions: + try: + version = ctx.versions.determine_version(self, ctx) + assert version + except VersionIndeterminableError as e: + reason = e + else: + reason = '"versions" option not set' + if not version: + raise BundleError(( + 'Cannot find version of %s. 
There is no manifest ' + 'which knows the version, and it cannot be ' + 'determined dynamically, because: %s') % (self, reason)) + self.version = version + return self.version + + def resolve_output(self, ctx=None, version=None): + """Return the full, absolute output path. + + If a %(version)s placeholder is used, it is replaced. + """ + if not ctx: + ctx = wrap(self.env, self) + output = ctx.resolver.resolve_output_to_path(ctx, self.output, self) + if has_placeholder(output): + output = output % {'version': version or self.get_version(ctx)} + return output + + def id(self): + """This is used to determine when a bundle definition has changed so + that a rebuild is required. + + The hash therefore should be built upon data that actually affect the + final build result. + """ + return hash_func((tuple(self.contents), + self.output, + tuple(self.filters), + bool(self.debug))) + # Note how self.depends is not included here. It could be, but we + # really want this hash to only change for stuff that affects the + # actual output bytes. Note that modifying depends will be effective + # after the first rebuild in any case. + + @property + def is_container(self): + """Return true if this is a container bundle, that is, a bundle that + acts only as a container for a number of sub-bundles. + + It must not contain any files of its own, and must have an empty + ``output`` attribute. + """ + return not has_files(self) and not self.output + + @contextmanager + def bind(self, env): + old_env = self._env + self._env = env + try: + yield + finally: + self._env = old_env + + def _get_env(self): + if self._env is None: + raise BundleError('Bundle is not connected to an environment') + return self._env + def _set_env(self, env): + self._env = env + env = property(_get_env, _set_env) + + def _merge_and_apply(self, ctx, output, force, parent_debug=None, + parent_filters=None, extra_filters=None, + disable_cache=None): + """Internal recursive build method. + + ``parent_debug`` is the debug setting used by the parent bundle. This + is not necessarily ``bundle.debug``, but rather what the calling method + in the recursion tree is actually using. + + ``parent_filters`` are what the parent passes along, for us to be + applied as input filters. Like ``parent_debug``, it is a collection of + the filters of all parents in the hierarchy. + + ``extra_filters`` may exist if the parent is a container bundle passing + filters along to its children; these are applied as input and output + filters (since there is no parent who could do the latter), and they + are not passed further down the hierarchy (but instead they become part + of ``parent_filters``. + + ``disable_cache`` is necessary because in some cases, when an external + bundle dependency has changed, we must not rely on the cache, since the + cache key is not taking into account changes in those dependencies + (for now). + """ + + parent_filters = parent_filters or [] + extra_filters = extra_filters or [] + # Determine the debug level to use. It determines if and which filters + # should be applied. + # + # The debug level is inherited (if the parent bundle is merging, a + # child bundle clearly cannot act in full debug=True mode). Bundles + # may define a custom ``debug`` attributes, but child bundles may only + # ever lower it, not increase it. + # + # If not parent_debug is given (top level), use the Environment value. + parent_debug = parent_debug if parent_debug is not None else ctx.debug + # Consider bundle's debug attribute and other things. 
+ current_debug_level = _effective_debug_level( + ctx, self, extra_filters, default=parent_debug) + # Special case: If we end up with ``True``, assume ``False`` instead. + # The alternative would be for the build() method to refuse to work at + # this point, which seems unnecessarily inconvenient (Instead how it + # works is that urls() simply doesn't call build() when debugging). + # Note: This can only happen if the Environment sets debug=True and + # nothing else overrides it. + if current_debug_level is True: + current_debug_level = False + + # Put together a list of filters that we would want to run here. + # These will be the bundle's filters, and any extra filters given + # to use if the parent is a container bundle. Note we do not yet + # include input/open filters pushed down by a parent build iteration. + filters = merge_filters(self.filters, extra_filters) + + # Initialize the filters. This happens before we choose which of + # them should actually run, so that Filter.setup() can influence + # this choice. + for filter in filters: + filter.set_context(ctx) + # Since we call this now every single time before the filter + # is used, we might pass the bundle instance it is going + # to be used with. For backwards-compatibility reasons, this + # is problematic. However, by inspecting the support arguments, + # we can deal with it. We probably then want to deprecate + # the old syntax before 1.0 (TODO). + filter.setup() + + # Given the debug level, determine which of the filters want to run + selected_filters = select_filters(filters, current_debug_level) + + # We construct two lists of filters. The ones we want to use in this + # iteration, and the ones we want to pass down to child bundles. + # Why? Say we are in merge mode. Assume an "input()" filter which does + # not run in merge mode, and a child bundle that switches to + # debug=False. The child bundle then DOES want to run those input + # filters, so we do need to pass them. + filters_to_run = merge_filters( + selected_filters, select_filters(parent_filters, current_debug_level)) + filters_to_pass_down = merge_filters(filters, parent_filters) + + # Prepare contents + resolved_contents = self.resolve_contents(ctx, force=True) + + # Unless we have been told by our caller to use or not use the cache + # for this, try to decide for ourselves. The issue here is that when a + # bundle has dependencies, like a sass file with includes otherwise not + # listed in the bundle sources, a change in such an external include + # would not influence the cache key, thus the use of the cache causing + # such a change to be ignored. For now, we simply do not use the cache + # for any bundle with dependencies. Another option would be to read + # the contents of all files declared via "depends", and use them as a + # cache key modifier. For now I am worried about the performance impact. + # + # Note: This decision only affects the current bundle instance. Even if + # dependencies cause us to ignore the cache for this bundle instance, + # child bundles may still use it! + actually_skip_cache_here = disable_cache or bool(self.resolve_depends(ctx)) + + filtertool = FilterTool( + ctx.cache, no_cache_read=actually_skip_cache_here, + kwargs={'output': output[0], + 'output_path': output[1]}) + + # Apply input()/open() filters to all the contents. + hunks = [] + for item, cnt in resolved_contents: + if isinstance(cnt, Bundle): + # Recursively process nested bundles. 
+ hunk = cnt._merge_and_apply( + wrap(ctx, cnt), output, force, current_debug_level, + filters_to_pass_down, disable_cache=disable_cache) + if hunk is not None: + hunks.append((hunk, {})) + + else: + # Give a filter the chance to open his file. + try: + hunk = filtertool.apply_func( + filters_to_run, 'open', [cnt], + # Also pass along the original relative path, as + # specified by the user, before resolving. + kwargs={'source': item}, + # We still need to open the file ourselves too and use + # it's content as part of the cache key, otherwise this + # filter application would only be cached by filename, + # and changes in the source not detected. The other + # option is to not use the cache at all here. Both have + # different performance implications, but I'm guessing + # that reading and hashing some files unnecessarily + # very often is better than running filters + # unnecessarily occasionally. + cache_key=[FileHunk(cnt)] if not is_url(cnt) else []) + except MoreThanOneFilterError as e: + raise BuildError(e) + except NoFilters: + # Open the file ourselves. + if is_url(cnt): + hunk = UrlHunk(cnt, env=ctx) + else: + hunk = FileHunk(cnt) + + # With the hunk, remember both the original relative + # path, as specified by the user, and the one that has + # been resolved to a filesystem location. We'll pass + # them along to various filter steps. + item_data = {'source': item, 'source_path': cnt} + + # Run input filters, unless open() told us not to. + hunk = filtertool.apply(hunk, filters_to_run, 'input', + kwargs=item_data) + hunks.append((hunk, item_data)) + + # If this bundle is empty (if it has nested bundles, they did + # not yield any hunks either), return None to indicate so. + if len(hunks) == 0: + return None + + # Merge the individual files together. There is an optional hook for + # a filter here, by implementing a concat() method. + try: + try: + final = filtertool.apply_func(filters_to_run, 'concat', [hunks]) + except MoreThanOneFilterError as e: + raise BuildError(e) + except NoFilters: + final = merge([h for h, _ in hunks]) + except IOError as e: + # IOErrors can be raised here if hunks are loaded for the + # first time. TODO: IOErrors can also be raised when + # a file is read during the filter-apply phase, but we don't + # convert it to a BuildError there... + raise BuildError(e) + + # Apply output filters. + # TODO: So far, all the situations where bundle dependencies are + # used/useful, are based on input filters having those dependencies. Is + # it even required to consider them here with respect to the cache? We + # might be able to run this operation with the cache on (the FilterTool + # being possibly configured with cache reads off). + return filtertool.apply(final, selected_filters, 'output') + + def _build(self, ctx, extra_filters=None, force=None, output=None, + disable_cache=None): + """Internal bundle build function. + + This actually tries to build this very bundle instance, as opposed to + the public-facing ``build()``, which first deals with the possibility + that we are a container bundle, i.e. having no files of our own. + + First checks whether an update for this bundle is required, via the + configured ``updater`` (which is almost always the timestamp-based one). + Unless ``force`` is given, in which case the bundle will always be + built, without considering timestamps. + + A ``FileHunk`` will be returned, or in a certain case, with no updater + defined and force=False, the return value may be ``False``. + + TODO: Support locking. 
When called from inside a template tag, this + should lock, so that multiple requests don't all start to build. When + called from the command line, there is no need to lock. + """ + extra_filters = extra_filters or [] + + if not self.output: + raise BuildError('No output target found for %s' % self) + + # Determine if we really need to build, or if the output file + # already exists and nothing has changed. + if force: + update_needed = True + elif not has_placeholder(self.output) and \ + not path.exists(self.resolve_output(ctx, self.output)): + update_needed = True + else: + update_needed = ctx.updater.needs_rebuild(self, ctx) \ + if ctx.updater else True + if update_needed==SKIP_CACHE: + disable_cache = True + + if not update_needed: + # We can simply return the existing output file + return FileHunk(self.resolve_output(ctx, self.output)) + + hunk = self._merge_and_apply( + ctx, [self.output, self.resolve_output(ctx, version='?')], + force, disable_cache=disable_cache, extra_filters=extra_filters) + if hunk is None: + raise BuildError('Nothing to build for %s, is empty' % self) + + if output: + # If we are given a stream, just write to it. + output.write(hunk.data()) + else: + if has_placeholder(self.output) and not ctx.versions: + raise BuildError(( + 'You have not set the "versions" option, but %s ' + 'uses a version placeholder in the output target' + % self)) + + version = None + if ctx.versions: + version = ctx.versions.determine_version(self, ctx, hunk) + + output_filename = self.resolve_output(ctx, version=version) + + # If it doesn't exist yet, create the target directory. + output_dir = path.dirname(output_filename) + if not path.exists(output_dir): + os.makedirs(output_dir) + + hunk.save(output_filename) + self.version = version + + if ctx.manifest: + ctx.manifest.remember(self, ctx, version) + if ctx.versions and version: + # Hook for the versioner (for example set the timestamp of + # the file) to the actual version. + ctx.versions.set_version(self, ctx, output_filename, version) + + # The updater may need to know this bundle exists and how it + # has been last built, in order to detect changes in the + # bundle definition, like new source files. + if ctx.updater: + ctx.updater.build_done(self, ctx) + + return hunk + + def build(self, force=None, output=None, disable_cache=None): + """Build this bundle, meaning create the file given by the ``output`` + attribute, applying the configured filters etc. + + If the bundle is a container bundle, then multiple files will be built. + + Unless ``force`` is given, the configured ``updater`` will be used to + check whether a build is even necessary. + + If ``output`` is a file object, the result will be written to it rather + than to the filesystem. + + The return value is a list of ``FileHunk`` objects, one for each bundle + that was built. + """ + ctx = wrap(self.env, self) + hunks = [] + for bundle, extra_filters, new_ctx in self.iterbuild(ctx): + hunks.append(bundle._build( + new_ctx, extra_filters, force=force, output=output, + disable_cache=disable_cache)) + return hunks + + def iterbuild(self, ctx): + """Iterate over the bundles which actually need to be built. + + This will often only entail ``self``, though for container bundles + (and container bundle hierarchies), a list of all the non-container + leafs will be yielded. + + Essentially, what this does is "skip" bundles which do not need to be + built on their own (container bundles), and gives the caller the child + bundles instead. 
+ + The return values are 3-tuples of (bundle, filter_list, new_ctx), with + the second item being a list of filters that the parent "container + bundles" this method is processing are passing down to the children. + """ + if self.is_container: + for bundle, _ in self.resolve_contents(ctx): + if bundle.is_container: + for child, child_filters, new_ctx in \ + bundle.iterbuild(wrap(ctx, bundle)): + yield ( + child, + merge_filters(child_filters, self.filters), + new_ctx) + else: + yield bundle, self.filters, wrap(ctx, bundle) + else: + yield self, [], ctx + + def _make_output_url(self, ctx): + """Return the output url, modified for expire header handling. + """ + + # Only query the version if we need to for performance + version = None + if has_placeholder(self.output) or ctx.url_expire != False: + # If auto-build is enabled, we must not use a cached version + # value, or we might serve old versions. + version = self.get_version(ctx, refresh=ctx.auto_build) + + url = self.output + if has_placeholder(url): + url = url % {'version': version} + url = ctx.resolver.resolve_output_to_url(ctx, url) + + if ctx.url_expire or ( + ctx.url_expire is None and not has_placeholder(self.output)): + url = "%s?%s" % (url, version) + return url + + def _urls(self, ctx, extra_filters, *args, **kwargs): + """Return a list of urls for this bundle, and all subbundles, + and, when it becomes necessary, start a build process. + """ + # Check if we should calculate SRI + calculate_sri = kwargs.pop('calculate_sri', False) + + # Look at the debug value to see if this bundle should return the + # source urls (in debug mode), or a single url of the bundle in built + # form. Once a bundle needs to be built, all of it's child bundles + # are built as well of course, so at this point we leave the urls() + # recursion and start a build() recursion. + debug = _effective_debug_level(ctx, self, extra_filters) + if debug == 'merge': + supposed_to_merge = True + elif debug is True: + supposed_to_merge = False + elif debug is False: + supposed_to_merge = True + else: + raise BundleError('Invalid debug value: %s' % debug) + + # We will output a single url for this bundle unless a) the + # configuration tells us to output the source urls + # ("supposed_to_merge"), or b) this bundle isn't actually configured to + # be built, that is, has no filters and no output target. + if supposed_to_merge and (self.filters or self.output): + # With ``auto_build``, build the bundle to make sure the output is + # up to date; otherwise, we just assume the file already exists. + # (not wasting any IO ops) + if ctx.auto_build: + self._build(ctx, extra_filters=extra_filters, force=False, + *args, **kwargs) + if calculate_sri: + return [{'uri': self._make_output_url(ctx), + 'sri': calculate_sri_on_file(ctx.resolver.resolve_output_to_path(ctx, self.output, self))}] + else: + return [self._make_output_url(ctx)] + else: + # We either have no files (nothing to build), or we are + # in debug mode: Instead of building the bundle, we + # source all contents instead. 
+ urls = [] + for org, cnt in self.resolve_contents(ctx): + if isinstance(cnt, Bundle): + urls.extend(org._urls( + wrap(ctx, cnt), + merge_filters(extra_filters, self.filters), + *args, + calculate_sri=calculate_sri, + **kwargs)) + elif is_url(cnt): + # Can't calculate SRI for non file + if calculate_sri: + urls.append({'uri': cnt, 'sri': None}) + else: + urls.append(cnt) + else: + sri = None + try: + url = ctx.resolver.resolve_source_to_url(ctx, cnt, org) + if calculate_sri: + sri = calculate_sri_on_file(ctx.resolver.resolve_output_to_path(ctx, cnt, org)) + except ValueError: + # If we cannot generate a url to a path outside the + # media directory. So if that happens, we copy the + # file into the media directory. + external = pull_external(ctx, cnt) + url = ctx.resolver.resolve_source_to_url(ctx, external, org) + if calculate_sri: + sri = calculate_sri_on_file(ctx.resolver.resolve_output_to_path(ctx, external, org)) + + if calculate_sri: + urls.append({'uri': url, 'sri': sri}) + else: + urls.append(url) + return urls + + def urls(self, *args, **kwargs): + """Return a list of urls for this bundle. + + Depending on the environment and given options, this may be a single + url (likely the case in production mode), or many urls (when we source + the original media files in DEBUG mode). + + Insofar necessary, this will automatically create or update the files + behind these urls. + + :param calculate_sri: Set to true to calculate a sub-resource integrity + string for the URLs. This changes the returned format. + + :return: List of URIs if calculate_sri is False. If calculate_sri is + true: list of {'uri': '', 'sri': ''}. + """ + ctx = wrap(self.env, self) + urls = [] + for bundle, extra_filters, new_ctx in self.iterbuild(ctx): + urls.extend(bundle._urls(new_ctx, extra_filters, *args, **kwargs)) + return urls + + +def pull_external(ctx, filename): + """Helper which will pull ``filename`` into + :attr:`Environment.directory`, for the purposes of being able to + generate a url for it. + """ + + # Generate the target filename. Use a hash to keep it unique and short, + # but attach the base filename for readability. + # The bit-shifting rids us of ugly leading - characters. + hashed_filename = hash_func(filename) + rel_path = path.join('webassets-external', + "%s_%s" % (hashed_filename, path.basename(filename))) + full_path = path.join(ctx.directory, rel_path) + + # Copy the file if necessary + if path.isfile(full_path): + gs = lambda p: os.stat(p).st_mtime + if gs(full_path) > gs(filename): + return full_path + directory = path.dirname(full_path) + if not path.exists(directory): + os.makedirs(directory) + FileHunk(filename).save(full_path) + return full_path + + +def get_all_bundle_files(bundle, ctx=None): + """Return a flattened list of all source files of the given bundle, all + its dependencies, recursively for all nested bundles. + + Making this a helper function rather than a part of the official + Bundle feels right. + """ + if not ctx: + ctx = wrap(bundle.env, bundle) + if not isinstance(ctx, ContextWrapper): + ctx = ContextWrapper(ctx) + files = [] + for _, c in bundle.resolve_contents(ctx): + if isinstance(c, Bundle): + files.extend(get_all_bundle_files(c, wrap(ctx, c))) + elif not is_url(c): + files.append(c) + files.extend(bundle.resolve_depends(ctx)) + return files + + +def _effective_debug_level(ctx, bundle, extra_filters=None, default=None): + """This is a helper used both in the urls() and the build() recursions. 
+ + It returns the debug level that this bundle, in a tree structure + of bundles, should use. It looks at any bundle-specific ``debug`` + attribute, considers an automatic upgrade to "merge" due to filters that + are present, and will finally use the value in the ``default`` argument, + which in turn defaults to ``env.debug``. + + It also ensures our rule that in a bundle hierarchy, the debug level may + only ever be lowered. Nested bundle may lower the level from ``True`` to + ``"merge"`` to ``False``, but never in the other direction. Which makes + sense: If a bundle is already being merged, we cannot start exposing the + source urls a child bundle, not if the correct order should be maintained. + + And while in theory it would seem possible to switch between full-out + production (debug=False) and ``"merge"``, the complexity there, in + particular with view as to how certain filter types like input() and + open() need to be applied to child bundles, is just not worth it. + """ + if default is None: + default = ctx.environment.debug + + if bundle.config.get('debug') is not None: + level = bundle.config.debug + else: + # If bundle doesn't force a level, then the presence of filters which + # declare they should always run puts the bundle automatically in + # merge mode. + filters = merge_filters(bundle.filters, extra_filters) + level = 'merge' if select_filters(filters, True) else None + + if level is not None: + # The new level must be lower than the older one. We do not thrown an + # error if this is NOT the case, but silently ignore it. This is so + # that a debug=True can be used to overwrite auto_debug_upgrade. + # Otherwise debug=True would always fail. + if cmp_debug_levels(default, level) > 0: + return level + return default + + +has_files = lambda bundle: \ + any([c for c in bundle.contents if not isinstance(c, Bundle)]) diff --git a/pelican/plugins/webassets/vendor/webassets/cache.py b/pelican/plugins/webassets/vendor/webassets/cache.py new file mode 100644 index 0000000..60fabb5 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/cache.py @@ -0,0 +1,239 @@ +"""Caches are used for multiple things: + + - To speed up asset building. Filter operations every step + of the way can be cached, so that individual parts of a + build that haven't changed can be reused. + + - Bundle definitions are cached when a bundle is built so we + can determine whether they have changed and whether a rebuild + is required. + +This data is not all stored in the same cache necessarily. The +classes in this module provide the "environment.cache" object, but +also serve in other places. +""" + +import os +from os import path +import errno +import tempfile +import warnings +from webassets import six +from webassets.merge import BaseHunk +from webassets.filter import Filter, freezedicts +from webassets.utils import md5_constructor, pickle +import types + + +__all__ = ('FilesystemCache', 'MemoryCache', 'get_cache',) + + +def make_hashable(data): + """Ensures ``data`` can be hashed(). + + Mostly needs to support dict. The other special types we use + as hash keys (Hunks, Filters) already have a proper hash() method. + + See also ``make_md5``. + + Note that we do not actually hash the data for the memory cache. + """ + return freezedicts(data) + + +def make_md5(*data): + """Make a md5 hash based on``data``. + + Specifically, this knows about ``Hunk`` objects, and makes sure + the actual content is hashed. 
+ + This is very conservative, and raises an exception if there are + data types that it does not explicitly support. This is because + we had in the past some debugging headaches with the cache not + working for this very reason. + + MD5 is faster than sha, and we don't care so much about collisions. + We care enough however not to use hash(). + """ + def walk(obj): + if isinstance(obj, (tuple, list, frozenset)): + for item in obj: + for d in walk(item): yield d + elif isinstance(obj, (dict)): + for k in sorted(obj.keys()): + for d in walk(k): yield d + for d in walk(obj[k]): yield d + elif isinstance(obj, BaseHunk): + yield obj.data().encode('utf-8') + elif isinstance(obj, int): + yield str(obj).encode('utf-8') + elif isinstance(obj, six.text_type): + yield obj.encode('utf-8') + elif isinstance(obj, six.binary_type): + yield obj + elif hasattr(obj, "id"): + for i in walk(obj.id()): + yield i + elif obj is None: + yield "None".encode('utf-8') + elif isinstance(obj, types.FunctionType): + yield str(hash(obj)).encode('utf-8') + else: + raise ValueError('Cannot MD5 type %s' % type(obj)) + md5 = md5_constructor() + for d in walk(data): + md5.update(d) + return md5.hexdigest() + + +def safe_unpickle(string): + """Unpickle the string, or return ``None`` if that fails.""" + try: + return pickle.loads(string) + except: + return None + + +class BaseCache(object): + """Abstract base class. + + The cache key must be something that is supported by the Python hash() + function. The cache value may be a string, or anything that can be pickled. + + Since the cache is used for multiple purposes, all webassets-internal code + should always tag its keys with an id, like so: + + key = ("tag", actual_key) + + One cache instance can only be used safely with a single Environment. + """ + + def get(self, key): + """Should return the cache contents, or False. + """ + raise NotImplementedError() + + def set(self, key, value): + raise NotImplementedError() + + +class MemoryCache(BaseCache): + """Caches stuff in the process memory. + + WARNING: Do NOT use this in a production environment, where you + are likely going to have multiple processes serving the same app! + + Note that the keys are used as-is, not passed through hash() (which is + a difference: http://stackoverflow.com/a/9022664/15677). However, the + reason we don't is because the original value is nicer to debug. + """ + + def __init__(self, capacity): + self.capacity = capacity + self.keys = [] + self.cache = {} + + def __eq__(self, other): + """Return equality with the config values that instantiate + this instance. + """ + return False == other or \ + None == other or \ + id(self) == id(other) + + def get(self, key): + key = make_md5(make_hashable(key)) + return self.cache.get(key, None) + + def set(self, key, value): + key = make_md5(make_hashable(key)) + self.cache[key] = value + try: + self.keys.remove(key) + except ValueError: + pass + self.keys.append(key) + + # limit cache to the given capacity + to_delete = self.keys[0:max(0, len(self.keys)-self.capacity)] + self.keys = self.keys[len(to_delete):] + for item in to_delete: + del self.cache[item] + + +class FilesystemCache(BaseCache): + """Uses a temporary directory on the disk. + """ + + V = 2 # We have changed the cache format once + + def __init__(self, directory, new_file_mode=None): + self.directory = directory + self.new_file_mode = new_file_mode + + def __eq__(self, other): + """Return equality with the config values + that instantiate this instance. 
+ """ + return True == other or \ + self.directory == other or \ + id(self) == id(other) + + def get(self, key): + filename = path.join(self.directory, '%s' % make_md5(self.V, key)) + try: + f = open(filename, 'rb') + except IOError as e: + if e.errno != errno.ENOENT: + raise + return None + try: + result = f.read() + finally: + f.close() + + unpickled = safe_unpickle(result) + if unpickled is None: + warnings.warn('Ignoring corrupted cache file %s' % filename) + return unpickled + + def set(self, key, data): + md5 = '%s' % make_md5(self.V, key) + filename = path.join(self.directory, md5) + fd, temp_filename = tempfile.mkstemp(prefix='.' + md5, + dir=self.directory) + try: + with os.fdopen(fd, 'wb') as f: + pickle.dump(data, f) + f.flush() + # If a non default mode is specified, then chmod the file to + # it before renaming it into place + if self.new_file_mode is not None: + os.chmod(temp_filename, self.new_file_mode) + if os.path.isfile(filename): + os.unlink(filename) + os.rename(temp_filename, filename) + except: + os.unlink(temp_filename) + raise + + +def get_cache(option, ctx): + """Return a cache instance based on ``option``. + """ + if not option: + return None + + if isinstance(option, BaseCache): + return option + elif isinstance(option, type) and issubclass(option, BaseCache): + return option() + + if option is True: + directory = path.join(ctx.directory, '.webassets-cache') + # Auto-create the default directory + if not path.exists(directory): + os.makedirs(directory) + else: + directory = option + return FilesystemCache(directory, ctx.cache_file_mode) diff --git a/pelican/plugins/webassets/vendor/webassets/env.py b/pelican/plugins/webassets/vendor/webassets/env.py new file mode 100644 index 0000000..90b8cd0 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/env.py @@ -0,0 +1,847 @@ +import os +from os import path +from itertools import chain +from webassets import six +from webassets.six.moves import map +from webassets.utils import is_url + +try: + import glob2 as glob + from glob import has_magic +except ImportError: + import glob + from glob import has_magic + +from .cache import get_cache +from .version import get_versioner, get_manifest +from .updater import get_updater +from .utils import urlparse + + +__all__ = ('Environment', 'RegisterError') + + +class RegisterError(Exception): + pass + + +class ConfigStorage(object): + """This is the backend which :class:`Environment` uses to store + its configuration values. + + Environment-subclasses like the one used by ``django-assets`` will + often want to use a custom ``ConfigStorage`` as well, building upon + whatever configuration the framework is using. + + The goal in designing this class therefore is to make it easy for + subclasses to change the place the data is stored: Only + _meth:`__getitem__`, _meth:`__setitem__`, _meth:`__delitem__` and + _meth:`__contains__` need to be implemented. + + One rule: The default storage is case-insensitive, and custom + environments should maintain those semantics. + + A related reason is why we don't inherit from ``dict``. It would + require us to re-implement a whole bunch of methods, like pop() etc. 
+ """ + + def __init__(self, env): + self.env = env + + def get(self, key, default=None): + try: + return self.__getitem__(key) + except KeyError: + return default + + def update(self, d): + for key in d: + self.__setitem__(key, d[key]) + + def setdefault(self, key, value): + if key not in self: + self.__setitem__(key, value) + return value + return self.__getitem__(key) + + def __contains__(self, key): + raise NotImplementedError() + + def __getitem__(self, key): + raise NotImplementedError() + + def __setitem__(self, key, value): + raise NotImplementedError() + + def __delitem__(self, key): + raise NotImplementedError() + + def _get_deprecated(self, key): + """For deprecated keys, fake the values as good as we can. + Subclasses need to call this in __getitem__.""" + pass + + def _set_deprecated(self, key, value): + """Same for __setitem__.""" + pass + + +def url_prefix_join(prefix, fragment): + """Join url prefix with fragment.""" + # Ensures urljoin will not cut the last part. + prefix += prefix[-1:] != '/' and '/' or '' + return urlparse.urljoin(prefix, fragment) + + +class Resolver(object): + """Responsible for resolving user-specified :class:`Bundle` + contents to actual files, as well as to urls. + + In this base version, this is essentially responsible for searching + the load path for the queried file. + + A custom implementation of this class is tremendously useful when + integrating with frameworks, which usually have some system to + spread static files across applications or modules. + + The class is designed for maximum extensibility. + """ + + def glob(self, basedir, expr): + """Evaluates a glob expression. + Yields a sorted list of absolute filenames. + """ + def glob_generator(basedir, expr): + expr = path.join(basedir, expr) + for filename in glob.iglob(expr): + if path.isdir(filename): + continue + yield path.normpath(filename) + + # The order of files returned by the glob implementation is undefined, + # so sort alphabetically to maintain a deterministic ordering + return sorted(glob_generator(basedir, expr)) + + def consider_single_directory(self, directory, item): + """Searches for ``item`` within ``directory``. Is able to + resolve glob instructions. + + Subclasses can call this when they have narrowed done the + location of a bundle item to a single directory. + """ + expr = path.join(directory, item) + if has_magic(expr): + # Note: No error if glob returns an empty list + return self.glob(directory, item) + else: + if path.exists(expr): + return expr + raise IOError("'%s' does not exist" % expr) + + def search_env_directory(self, ctx, item): + """This is called by :meth:`search_for_source` when no + :attr:`Environment.load_path` is set. + """ + return self.consider_single_directory(ctx.directory, item) + + def search_load_path(self, ctx, item): + """This is called by :meth:`search_for_source` when a + :attr:`Environment.load_path` is set. + + If you want to change how the load path is processed, + overwrite this method. + """ + if has_magic(item): + # We glob all paths. + result = [] + for path in ctx.load_path: + result.extend(self.glob(path, item)) + return result + else: + # Single file, stop when we find the first match, or error + # out otherwise. We still use glob() because then the load_path + # itself can contain globs. Neat! 
+ for path in ctx.load_path: + result = self.glob(path, item) + if result: + return result + raise IOError("'%s' not found in load path: %s" % ( + item, ctx.load_path)) + + def search_for_source(self, ctx, item): + """Called by :meth:`resolve_source` after determining that + ``item`` is a relative filesystem path. + + You should always overwrite this method, and let + :meth:`resolve_source` deal with absolute paths, urls and + other types of items that a bundle may contain. + """ + if ctx.load_path: + return self.search_load_path(ctx, item) + else: + return self.search_env_directory(ctx, item) + + def query_url_mapping(self, ctx, filepath): + """Searches the environment-wide url mapping (based on the + urls assigned to each directory in the load path). Returns + the correct url for ``filepath``. + + Subclasses should be sure that they really want to call this + method, instead of simply falling back to ``super()``. + """ + # Build a list of dir -> url mappings + mapping = list(ctx.url_mapping.items()) + try: + mapping.append((ctx.directory, ctx.url)) + except EnvironmentError: + # Rarely, directory/url may not be set. That's ok. + pass + + # Make sure paths are absolute, normalized, and sorted by length + mapping = list(map( + lambda p_u: (path.normpath(path.abspath(p_u[0])), p_u[1]), + mapping)) + mapping.sort(key=lambda i: len(i[0]), reverse=True) + + needle = path.normpath(filepath) + for candidate, url in mapping: + if needle.startswith(candidate): + # Found it! + rel_path = needle[len(candidate) + 1:] + # If there are any subdirs in rel_path, ensure + # they use HTML-style path separators, in case + # the local OS (Windows!) has a different scheme + rel_path = rel_path.replace(os.sep, "/") + return url_prefix_join(url, rel_path) + raise ValueError('Cannot determine url for %s' % filepath) + + def resolve_source(self, ctx, item): + """Given ``item`` from a Bundle's contents, this has to + return the final value to use, usually an absolute + filesystem path. + + .. note:: + It is also allowed to return urls and bundle instances + (or generally anything else the calling :class:`Bundle` + instance may be able to handle). Indeed this is the + reason why the name of this method does not imply a + return type. + + The incoming item is usually a relative path, but may also be + an absolute path, or a url. These you will commonly want to + return unmodified. + + This method is also allowed to resolve ``item`` to multiple + values, in which case a list should be returned. This is + commonly used if ``item`` includes glob instructions + (wildcards). + + .. note:: + Instead of this, subclasses should consider implementing + :meth:`search_for_source` instead. + """ + + # Pass through some things unscathed + if not isinstance(item, six.string_types): + # Don't stand in the way of custom values. + return item + if is_url(item) or path.isabs(item): + return item + + return self.search_for_source(ctx, item) + + def resolve_output_to_path(self, ctx, target, bundle): + """Given ``target``, this has to return the absolute + filesystem path to which the output file of ``bundle`` + should be written. + + ``target`` may be a relative or absolute path, and is + usually taking from the :attr:`Bundle.output` property. + + If a version-placeholder is used (``%(version)s``, it is + still unresolved at this point. 
+ """ + return path.join(ctx.directory, target) + + def resolve_source_to_url(self, ctx, filepath, item): + """Given the absolute filesystem path in ``filepath``, as + well as the original value from :attr:`Bundle.contents` which + resolved to this path, this must return the absolute url + through which the file is to be referenced. + + Depending on the use case, either the ``filepath`` or the + ``item`` argument will be more helpful in generating the url. + + This method should raise a ``ValueError`` if the url cannot + be determined. + """ + return self.query_url_mapping(ctx, filepath) + + def resolve_output_to_url(self, ctx, target): + """Given ``target``, this has to return the url through + which the output file can be referenced. + + ``target`` may be a relative or absolute path, and is + usually taking from the :attr:`Bundle.output` property. + + This is different from :meth:`resolve_source_to_url` in + that you do not passed along the result of + :meth:`resolve_output_to_path`. This is because in many + use cases, the filesystem is not available at the point + where the output url is needed (the media server may on + a different machine). + """ + if not path.isabs(target): + # If relative, output files are written to env.directory, + # thus we can simply base all values off of env.url. + return url_prefix_join(ctx.url, target) + else: + # If an absolute output path was specified, then search + # the url mappings. + return self.query_url_mapping(ctx, target) + + +class BundleRegistry(object): + + def __init__(self): + self._named_bundles = {} + self._anon_bundles = [] + + def __iter__(self): + return chain(six.itervalues(self._named_bundles), self._anon_bundles) + + def __getitem__(self, name): + return self._named_bundles[name] + + def __contains__(self, name): + return name in self._named_bundles + + def __len__(self): + return len(self._named_bundles) + len(self._anon_bundles) + + def __bool__(self): + return True + __nonzero__ = __bool__ # For Python 2 + + def register(self, name, *args, **kwargs): + """Register a :class:`Bundle` with the given ``name``. + + This can be called in multiple ways: + + - With a single :class:`Bundle` instance:: + + env.register('jquery', jquery_bundle) + + - With a dictionary, registering multiple bundles at once: + + bundles = {'js': js_bundle, 'css': css_bundle} + env.register(bundles) + + .. 
note:: + This is a convenient way to use a :doc:`loader `: + + env.register(YAMLLoader('assets.yaml').load_bundles()) + + - With many arguments, creating a new bundle on the fly:: + + env.register('all_js', jquery_bundle, 'common.js', + filters='rjsmin', output='packed.js') + """ + + from .bundle import Bundle + + # Register a dict + if isinstance(name, dict) and not args and not kwargs: + for name, bundle in name.items(): + self.register(name, bundle) + return + + if len(args) == 0: + raise TypeError('at least two arguments are required') + else: + if len(args) == 1 and not kwargs and isinstance(args[0], Bundle): + bundle = args[0] + else: + bundle = Bundle(*args, **kwargs) + + if not bundle.merge: + return self.decompose_bundle(name, bundle) + + if name in self._named_bundles: + if self._named_bundles[name] == bundle: + pass # ignore + else: + raise RegisterError('Another bundle is already registered ' + + 'as "%s": %s' % (name, self._named_bundles[name])) + else: + self._named_bundles[name] = bundle + bundle.env = self # take ownership + + return bundle + + def add(self, *bundles): + """Register a list of bundles with the environment, without + naming them. + + This isn't terribly useful in most cases. It exists primarily + because in some cases, like when loading bundles by searching + in templates for the use of an "assets" tag, no name is available. + """ + for bundle in bundles: + self._anon_bundles.append(bundle) + bundle.env = self # take ownership + + def decompose_bundle(self, name, bundle): + from .bundle import Bundle + + if not bundle.output: + raise RegisterError('If `merge` is False, an output must be defined') + + for content in bundle.contents: + if isinstance(content, Bundle): + raise RegisterError('Nested bundles are not allowed when `merge` is False') + + bundle.env = self + bundles = [] + contents = bundle.resolve_contents() + for _, abspath in contents: + nb = self.register_decomposed(name, bundle, abspath) + bundles.append(nb) + + return bundles + + def register_decomposed(self, name, bundle, abspath): + from .bundle import Bundle + + relpath = path.relpath(abspath, self.directory) + basename = path.basename(relpath) + filename, _ = path.splitext(basename) + filepath, fileext = path.splitext(relpath) + new_name = path.join(name, basename) if name else basename + # The output might also contain `%(version)s` so I can't use + # the C-style method of string formatting + output = ( + bundle.output + .replace('%(name)s', filename) + .replace('%(path)s', filepath) + .replace('%(ext)s', fileext.strip('.')) + ) + new_bundle = Bundle( + relpath, + output=output, + filters=bundle.filters, + depends=bundle.depends, + remove_duplicates=bundle.remove_duplicates, + extra=bundle.extra, + ) + new_bundle._config = bundle._config + return self.register(new_name, new_bundle) + + +# Those are config keys used by the environment. Framework-wrappers may +# find this list useful if they desire to prefix those settings. For example, +# in Django, it would be ASSETS_DEBUG. Other config keys are encouraged to use +# their own namespacing, so they don't need to be prefixed. For example, a +# filter setting might be CSSMIN_BIN. +env_options = [ + 'directory', 'url', 'debug', 'cache', 'updater', 'auto_build', + 'url_expire', 'versions', 'manifest', 'load_path', 'url_mapping', + 'cache_file_mode' ] + + +class ConfigurationContext(object): + """Interface to the webassets configuration key-value store. + + This wraps the :class:`ConfigStorage`` interface and adds some + helpers. 
It allows attribute-access to the most important + settings, and transparently instantiates objects, such that + ``env.manifest`` gives you an object, even though the configuration + contains the string "json". + """ + + def __init__(self, storage): + self._storage = storage + + def append_path(self, path, url=None): + """Appends ``path`` to :attr:`load_path`, and adds a + corresponding entry to :attr:`url_mapping`. + """ + self.load_path.append(path) + if url: + self.url_mapping[path] = url + + def _set_debug(self, debug): + self._storage['debug'] = debug + def _get_debug(self): + return self._storage['debug'] + debug = property(_get_debug, _set_debug, doc= + """Enable/disable debug mode. Possible values are: + + ``False`` + Production mode. Bundles will be merged and filters applied. + ``True`` + Enable debug mode. Bundles will output their individual source + files. + *"merge"* + Merge the source files, but do not apply filters. + """) + + def _set_cache_file_mode(self, mode): + self._storage['cache_file_mode'] = mode + def _get_cache_file_mode(self): + return self._storage['cache_file_mode'] + cache_file_mode = property(_get_cache_file_mode, _set_cache_file_mode, doc= + """Controls the mode of files created in the cache. The default mode + is 0600. Follows standard unix mode. + Possible values are any unix mode, e.g.: + + ``0660`` + Enable the group read+write bits + + ``0666`` + Enable world read+write bits + + """) + + def _set_cache(self, enable): + self._storage['cache'] = enable + def _get_cache(self): + cache = get_cache(self._storage['cache'], self) + if cache != self._storage['cache']: + self._storage['cache'] = cache + return cache + cache = property(_get_cache, _set_cache, doc= + """Controls the behavior of the cache. The cache will speed up rebuilding + of your bundles, by caching individual filter results. This can be + particularly useful while developing, if your bundles would otherwise take + a long time to rebuild. + + Possible values are: + + ``False`` + Do not use the cache. + + ``True`` (default) + Cache using default location, a ``.webassets-cache`` folder inside + :attr:`directory`. + + *custom path* + Use the given directory as the cache directory. + """) + + def _set_auto_build(self, value): + self._storage['auto_build'] = value + def _get_auto_build(self): + return self._storage['auto_build'] + auto_build = property(_get_auto_build, _set_auto_build, doc= + """Controls whether bundles should be automatically built, and + rebuilt, when required (if set to ``True``), or whether they + must be built manually be the user, for example via a management + command. + + This is a good setting to have enabled during debugging, and can + be very convenient for low-traffic sites in production as well. + However, there is a cost in checking whether the source files + have changed, so if you care about performance, or if your build + process takes very long, then you may want to disable this. + + By default automatic building is enabled. + """) + + def _set_manifest(self, manifest): + self._storage['manifest'] = manifest + def _get_manifest(self): + manifest = get_manifest(self._storage['manifest'], env=self) + if manifest != self._storage['manifest']: + self._storage['manifest'] = manifest + return manifest + manifest = property(_get_manifest, _set_manifest, doc= + """A manifest persists information about the versions bundles + are at. 
+ + The Manifest plays a role only if you insert the bundle version + in your output filenames, or append the version as a querystring + to the url (via the ``url_expire`` option). It serves two + purposes: + + - Without a manifest, it may be impossible to determine the + version at runtime. In a deployed app, the media files may + be stored on a different server entirely, and be + inaccessible from the application code. The manifest, + if shipped with your application, is what still allows to + construct the proper URLs. + + - Even if it were possible to determine the version at + runtime without a manifest, it may be a costly process, + and using a manifest may give you better performance. If + you use a hash-based version for example, this hash would + need to be recalculated every time a new process is + started. + + Valid values are: + + ``"cache"`` (default) + The cache is used to remember version information. This + is useful to avoid recalculating the version hash. + + ``"file:{path}"`` + Stores version information in a file at {path}. If not + path is given, the manifest will be stored as + ``.webassets-manifest`` in ``Environment.directory``. + + ``"json:{path}"`` + Same as "file:{path}", but uses JSON to store the information. + + ``False``, ``None`` + No manifest is used. + + Any custom manifest implementation. + """) + + def _set_versions(self, versions): + self._storage['versions'] = versions + def _get_versions(self): + versions = get_versioner(self._storage['versions']) + if versions != self._storage['versions']: + self._storage['versions'] = versions + return versions + versions = property(_get_versions, _set_versions, doc= + """Defines what should be used as a Bundle ``version``. + + A bundle's version is what is appended to URLs when the + ``url_expire`` option is enabled, and the version can be part + of a Bundle's output filename by use of the ``%(version)s`` + placeholder. + + Valid values are: + + ``timestamp`` + The version is determined by looking at the mtime of a + bundle's output file. + + ``hash`` (default) + The version is a hash over the output file's content. + + ``False``, ``None`` + Functionality that requires a version is disabled. This + includes the ``url_expire`` option, the ``auto_build`` + option, and support for the %(version)s placeholder. + + Any custom version implementation. + + """) + + def set_updater(self, updater): + self._storage['updater'] = updater + def get_updater(self): + updater = get_updater(self._storage['updater']) + if updater != self._storage['updater']: + self._storage['updater'] = updater + return updater + updater = property(get_updater, set_updater, doc= + """Controls how the ``auto_build`` option should determine + whether a bundle needs to be rebuilt. + + ``"timestamp"`` (default) + Rebuild bundles if the source file timestamp exceeds the existing + output file's timestamp. + + ``"always"`` + Always rebuild bundles (avoid in production environments). + + Any custom version implementation. + """) + + def _set_url_expire(self, url_expire): + self._storage['url_expire'] = url_expire + def _get_url_expire(self): + return self._storage['url_expire'] + url_expire = property(_get_url_expire, _set_url_expire, doc= + """If you send your assets to the client using a + *far future expires* header (to minimize the 304 responses + your server has to send), you need to make sure that assets + will be reloaded by the browser when they change. 
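+
+    A rough sketch of the effect (editor's illustration; the hash value is
+    made up)::
+
+        env = Environment('./static', '/static')
+        env.url_expire = True
+        # urls generated for bundles now look like
+        # '/static/gen/packed.js?1f2e3d4c'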
+ + If this is set to ``True``, then the Bundle URLs generated by + webassets will have their version (see ``Environment.versions``) + appended as a querystring. + + An alternative approach would be to use the ``%(version)s`` + placeholder in the bundle output file. + + The default behavior (indicated by a ``None`` value) is to add + an expiry querystring if the bundle does not use a version + placeholder. + """) + + def _set_directory(self, directory): + self._storage['directory'] = directory + def _get_directory(self): + try: + return path.abspath(self._storage['directory']) + except KeyError: + raise EnvironmentError( + 'The environment has no "directory" configured') + directory = property(_get_directory, _set_directory, doc= + """The base directory to which all paths will be relative to, + unless :attr:`load_path` are given, in which case this will + only serve as the output directory. + + In the url space, it is mapped to :attr:`urls`. + """) + + def _set_url(self, url): + self._storage['url'] = url + def _get_url(self): + try: + return self._storage['url'] + except KeyError: + raise EnvironmentError( + 'The environment has no "url" configured') + url = property(_get_url, _set_url, doc= + """The url prefix used to construct urls for files in + :attr:`directory`. + + To define url spaces for other directories, see + :attr:`url_mapping`. + """) + + def _set_load_path(self, load_path): + self._storage['load_path'] = load_path + def _get_load_path(self): + return self._storage['load_path'] + load_path = property(_get_load_path, _set_load_path, doc= + """An list of directories that will be searched for source files. + + If this is set, source files will only be looked for in these + directories, and :attr:`directory` is used as a location for + output files only. + + .. note: + You are free to add :attr:`directory` to your load path as + well. + + .. note: + Items on the load path are allowed to contain globs. + + To modify this list, you should use :meth:`append_path`, since + it makes it easy to add the corresponding url prefix to + :attr:`url_mapping`. + """) + + def _set_url_mapping(self, url_mapping): + self._storage['url_mapping'] = url_mapping + def _get_url_mapping(self): + return self._storage['url_mapping'] + url_mapping = property(_get_url_mapping, _set_url_mapping, doc= + """A dictionary of directory -> url prefix mappings that will + be considered when generating urls, in addition to the pair of + :attr:`directory` and :attr:`url`, which is always active. + + You should use :meth:`append_path` to add directories to the + load path along with their respective url spaces, instead of + modifying this setting directly. + """) + + def _set_resolver(self, resolver): + self._storage['resolver'] = resolver + def _get_resolver(self): + return self._storage['resolver'] + resolver = property(_get_resolver, _set_resolver) + + +class BaseEnvironment(BundleRegistry, ConfigurationContext): + """Abstract base class for :class:`Environment` with slightly more + generic assumptions, to ease subclassing. 
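+
+    A minimal subclass might look roughly like this (editor's sketch; the
+    class and argument names are hypothetical)::
+
+        class MyFrameworkEnvironment(BaseEnvironment):
+            config_storage_class = DictConfigStorage
+            resolver_class = Resolver
+
+        env = MyFrameworkEnvironment(directory='static', url='/static')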
+ """ + + config_storage_class = None + resolver_class = Resolver + + def __init__(self, **config): + BundleRegistry.__init__(self) + self._config = self.config_storage_class(self) + ConfigurationContext.__init__(self, self._config) + + # directory, url currently do not have default values + # + # some thought went into these defaults: + # - enable url_expire, because we want to encourage the right thing + # - default to hash versions, for the same reason: they're better + # - manifest=cache because hash versions are slow + self.config.setdefault('debug', False) + self.config.setdefault('cache', True) + self.config.setdefault('url_expire', None) + self.config.setdefault('auto_build', True) + self.config.setdefault('manifest', 'cache') + self.config.setdefault('versions', 'hash') + self.config.setdefault('updater', 'timestamp') + self.config.setdefault('load_path', []) + self.config.setdefault('url_mapping', {}) + self.config.setdefault('resolver', self.resolver_class()) + self.config.setdefault('cache_file_mode', None) + + self.config.update(config) + + @property + def config(self): + """Key-value configuration. Keys are case-insensitive. + """ + # This is a property so that user are not tempted to assign + # a custom dictionary which won't uphold our caseless semantics. + return self._config + + +class DictConfigStorage(ConfigStorage): + """Using a lower-case dict for configuration values. + """ + def __init__(self, *a, **kw): + self._dict = {} + ConfigStorage.__init__(self, *a, **kw) + def __contains__(self, key): + return self._dict.__contains__(key.lower()) + def __getitem__(self, key): + key = key.lower() + value = self._get_deprecated(key) + if not value is None: + return value + return self._dict.__getitem__(key) + def __setitem__(self, key, value): + key = key.lower() + if not self._set_deprecated(key, value): + self._dict.__setitem__(key.lower(), value) + def __delitem__(self, key): + self._dict.__delitem__(key.lower()) + + +class Environment(BaseEnvironment): + """Owns a collection of bundles, and a set of configuration values which + will be used when processing these bundles. + """ + + config_storage_class = DictConfigStorage + + def __init__(self, directory=None, url=None, **more_config): + super(Environment, self).__init__(**more_config) + if directory is not None: + self.directory = directory + if url is not None: + self.url = url + + +def parse_debug_value(value): + """Resolve the given string value to a debug option. + + Can be used to deal with os environment variables, for example. + """ + if value is None: + return value + value = value.lower() + if value in ('true', '1'): + return True + elif value in ('false', '0'): + return False + elif value in ('merge',): + return 'merge' + else: + raise ValueError() diff --git a/pelican/plugins/webassets/vendor/webassets/exceptions.py b/pelican/plugins/webassets/vendor/webassets/exceptions.py new file mode 100644 index 0000000..a642c96 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/exceptions.py @@ -0,0 +1,32 @@ +__all__ = ('BundleError', 'BuildError', 'FilterError', + 'EnvironmentError', 'ImminentDeprecationWarning') + + +class EnvironmentError(Exception): + pass + + +class BundleError(Exception): + pass + + +class BuildError(BundleError): + pass + + +class FilterError(BuildError): + pass + + +class ImminentDeprecationWarning(Warning): + """Warning category for deprecated features, since the default + DeprecationWarning is silenced on Python 2.7+. 
+ + With webassets mainly targeting developers working directly with + the library, it makes sense to force deprecation warnings on them. + There should be no end users who will be bothered with them. + + Plus, we tend to remove rather quickly, so it's important devs + get to see this. + """ + pass diff --git a/pelican/plugins/webassets/vendor/webassets/ext/__init__.py b/pelican/plugins/webassets/vendor/webassets/ext/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pelican/plugins/webassets/vendor/webassets/ext/jinja2.py b/pelican/plugins/webassets/vendor/webassets/ext/jinja2.py new file mode 100644 index 0000000..defeb9e --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/ext/jinja2.py @@ -0,0 +1,255 @@ +from __future__ import absolute_import + +import warnings +import jinja2 +from jinja2.ext import Extension +from jinja2 import nodes +from webassets import Bundle +from webassets.loaders import GlobLoader, LoaderError +from webassets.exceptions import ImminentDeprecationWarning + + +__all__ = ('assets', 'Jinja2Loader',) + + +class AssetsExtension(Extension): + """ + As opposed to the Django tag, this tag is slightly more capable due + to the expressive powers inherited from Jinja. For example: + + {% assets "src1.js", "src2.js", get_src3(), + filter=("jsmin", "gzip"), output=get_output() %} + {% endassets %} + """ + + tags = set(['assets']) + + BundleClass = Bundle # Helpful for mocking during tests. + + def __init__(self, environment): + super(AssetsExtension, self).__init__(environment) + + # Add the defaults to the environment + environment.extend( + assets_environment=None, + ) + + def parse(self, parser): + lineno = next(parser.stream).lineno + + files = [] + output = nodes.Const(None) + filters = nodes.Const(None) + dbg = nodes.Const(None) + depends = nodes.Const(None) + + # Parse the arguments + first = True + while parser.stream.current.type != 'block_end': + if not first: + parser.stream.expect('comma') + first = False + + # Lookahead to see if this is an assignment (an option) + if parser.stream.current.test('name') and parser.stream.look().test('assign'): + name = next(parser.stream).value + parser.stream.skip() + value = parser.parse_expression() + if name == 'filters': + filters = value + elif name == 'filter': + filters = value + warnings.warn('The "filter" option of the {%% assets %%} ' + 'template tag has been renamed to ' + '"filters" for consistency reasons ' + '(line %s).' % lineno, + ImminentDeprecationWarning) + elif name == 'output': + output = value + elif name == 'debug': + dbg = value + elif name == 'depends': + depends = value + else: + parser.fail('Invalid keyword argument: %s' % name) + # Otherwise assume a source file is given, which may be any + # expression, except note that strings are handled separately above. + else: + expression = parser.parse_expression() + if isinstance(expression, (nodes.List, nodes.Tuple)): + files.extend(expression.iter_child_nodes()) + else: + files.append(expression) + + # Parse the contents of this tag + body = parser.parse_statements(['name:endassets'], drop_needle=True) + + # We want to make some values available to the body of our tag. + # Specifically, the file url(s) (ASSET_URL), and any extra dict set in + # the bundle (EXTRA). + # + # A short interlope: I would have preferred to make the values of the + # extra dict available directly. Unfortunately, the way Jinja2 does + # things makes this problematic. I'll explain. + # + # Jinja2 generates Python code from it's AST which it then executes. 
+ # So the way extensions implement making custom variables available to + # a block of code is by generating a ``CallBlock``, which essentially + # wraps our child nodes in a Python function. The arguments of this + # function are the values that are available to our tag contents. + # + # But we need to generate this ``CallBlock`` now, during parsing, and + # right now we don't know the actual ``Bundle.extra`` values yet. We + # only resolve the bundle during rendering! + # + # This would easily be solved if Jinja2 where to allow extensions to + # scope it's context, which is a dict of values that templates can + # access, just like in Django (you might see on occasion + # ``context.resolve('foo')`` calls in Jinja2's generated code). + # However, it seems the context is essentially only for the initial + # set of data passed to render(). There are some statements by Armin + # that this might change at some point, but I've run into this problem + # before, and I'm not holding my breath. + # + # I **really** did try to get around this, including crazy things like + # inserting custom Python code by patching the tag child nodes:: + # + # rv = object.__new__(nodes.InternalName) + # # l_EXTRA is the argument we defined for the CallBlock/Macro + # # Letting Jinja define l_kwargs is also possible + # nodes.Node.__init__(rv, '; context.vars.update(l_EXTRA)', + # lineno=lineno) + # # Scope required to ensure our code on top + # body = [rv, nodes.Scope(body)] + # + # This custom code would run at the top of the function in which the + # CallBlock node would wrap the code generated from our tag's child + # nodes. Note that it actually does works, but doesn't clear the values + # at the end of the scope). + # + # If it is possible to do this, it certainly isn't reasonable/ + # + # There is of course another option altogether: Simple resolve the tag + # definition to a bundle right here and now, thus get access to the + # extra dict, make all values arguments to the CallBlock (Limited to + # 255 arguments to a Python function!). And while that would work fine + # in 99% of cases, it wouldn't be correct. The compiled template could + # be cached and used with different bundles/environments, and this + # would require the bundle to resolve at parse time, and hardcode it's + # extra values. + # + # Interlope end. + # + # Summary: We have to be satisfied with a single EXTRA variable. + args = [nodes.Name('ASSET_URL', 'param'), + nodes.Name('ASSET_SRI', 'param'), + nodes.Name('EXTRA', 'param')] + + # Return a ``CallBlock``, which means Jinja2 will call a Python method + # of ours when the tag needs to be rendered. That method can then + # render the template body. 
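+        #
+        # Editor's illustration of the resulting template-side API (the
+        # output filename below is hypothetical):
+        #
+        #   {% assets "src1.js", "src2.js", filters="rjsmin",
+        #             output="gen/packed.js" %}
+        #     <script src="{{ ASSET_URL }}"></script>
+        #   {% endassets %}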
+ call = self.call_method( + # Note: Changing the args here requires updating ``Jinja2Loader`` + '_render_assets', args=[filters, output, dbg, depends, nodes.List(files)]) + call_block = nodes.CallBlock(call, args, [], body) + call_block.set_lineno(lineno) + return call_block + + @classmethod + def resolve_contents(cls, contents, env): + """Resolve bundle names.""" + result = [] + for f in contents: + try: + result.append(env[f]) + except KeyError: + result.append(f) + return result + + def _render_assets(self, filter, output, dbg, depends, files, caller=None): + env = self.environment.assets_environment + if env is None: + raise RuntimeError('No assets environment configured in '+ + 'Jinja2 environment') + + # Construct a bundle with the given options + bundle_kwargs = { + 'output': output, + 'filters': filter, + 'debug': dbg, + 'depends': depends + } + bundle = self.BundleClass( + *self.resolve_contents(files, env), **bundle_kwargs) + + # Retrieve urls (this may or may not cause a build) + with bundle.bind(env): + urls = bundle.urls(calculate_sri=True) + + # For each url, execute the content of this template tag (represented + # by the macro ```caller`` given to use by Jinja2). + result = u"" + for entry in urls: + if isinstance(entry, dict): + result += caller(entry['uri'], entry.get('sri', None), bundle.extra) + else: + result += caller(entry, None, bundle.extra) + return result + + +assets = AssetsExtension # nicer import name + + +class Jinja2Loader(GlobLoader): + """Parse all the Jinja2 templates in the given directory, try to + find bundles in active use. + + Try all the given environments to parse the template, until we + succeed. + """ + + def __init__(self, assets_env, directories, jinja2_envs, charset='utf8', jinja_ext='*.html'): + self.asset_env = assets_env + self.directories = directories + self.jinja2_envs = jinja2_envs + self.charset = charset + self.jinja_ext = jinja_ext + + def load_bundles(self): + bundles = [] + for template_dir in self.directories: + for filename in self.glob_files((template_dir, self.jinja_ext)): + bundles.extend(self.with_file(filename, self._parse) or []) + return bundles + + def _parse(self, filename, contents): + for i, env in enumerate(self.jinja2_envs): + try: + t = env.parse(contents.decode(self.charset)) + except jinja2.exceptions.TemplateSyntaxError as e: + #print ('jinja parser (env %d) failed: %s'% (i, e)) + pass + else: + result = [] + def _recurse_node(node_to_search): + for node in node_to_search.iter_child_nodes(): + if isinstance(node, jinja2.nodes.Call): + if isinstance(node.node, jinja2.nodes.ExtensionAttribute)\ + and node.node.identifier == AssetsExtension.identifier: + filter, output, dbg, depends, files = node.args + bundle = Bundle( + *AssetsExtension.resolve_contents(files.as_const(), self.asset_env), + **{ + 'output': output.as_const(), + 'depends': depends.as_const(), + 'filters': filter.as_const()}) + result.append(bundle) + else: + _recurse_node(node) + for node in t.iter_child_nodes(): + _recurse_node(node) + return result + else: + raise LoaderError('Jinja parser failed on %s, tried %d environments' % ( + filename, len(self.jinja2_envs))) + return False diff --git a/pelican/plugins/webassets/vendor/webassets/filter/__init__.py b/pelican/plugins/webassets/vendor/webassets/filter/__init__.py new file mode 100644 index 0000000..146f1a4 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/__init__.py @@ -0,0 +1,737 @@ +"""Assets can be filtered through one or multiple filters, modifying their +contents 
(think minification, compression). +""" + +from __future__ import with_statement + +import os +import subprocess +import inspect +import shlex +import tempfile +import pkgutil +from webassets import six +from webassets.six.moves import map +from webassets.six.moves import zip +try: + frozenset +except NameError: + from sets import ImmutableSet as frozenset +from webassets.exceptions import FilterError +from webassets.importlib import import_module +from webassets.utils import hash_func + + +__all__ = ('Filter', 'CallableFilter', 'get_filter', 'register_filter', + 'ExternalTool', 'JavaTool') + + +def freezedicts(obj): + """Recursively iterate over ``obj``, supporting dicts, tuples + and lists, and freeze ``dicts`` such that ``obj`` can be used + with hash(). + """ + if isinstance(obj, (list, tuple)): + return type(obj)([freezedicts(sub) for sub in obj]) + if isinstance(obj, dict): + return frozenset(six.iteritems(obj)) + return obj + + +def smartsplit(string, sep): + """Split while allowing escaping. + + So far, this seems to do what I expect - split at the separator, + allow escaping via \\, and allow the backslash itself to be escaped. + + One problem is that it can raise a ValueError when given a backslash + without a character to escape. I'd really like a smart splitter + without manually scan the string. But maybe that is exactly what should + be done. + """ + assert string is not None # or shlex will read from stdin + if not six.PY3: + # On 2.6, shlex fails miserably with unicode input + is_unicode = isinstance(string, unicode) + if is_unicode: + string = string.encode('utf8') + l = shlex.shlex(string, posix=True) + l.whitespace += ',' + l.whitespace_split = True + l.quotes = '' + if not six.PY3 and is_unicode: + return map(lambda s: s.decode('utf8'), list(l)) + else: + return list(l) + + +class option(tuple): + """Micro option system. I want this to remain small and simple, + which is why this class is lower-case. + + See ``parse_options()`` and ``Filter.options``. + """ + def __new__(cls, initarg, configvar=None, type=None): + # If only one argument given, it is the configvar + if configvar is None: + configvar = initarg + initarg = None + return tuple.__new__(cls, (initarg, configvar, type)) + + +def parse_options(options): + """Parses the filter ``options`` dict attribute. + The result is a dict of ``option`` tuples. + """ + # Normalize different ways to specify the dict items: + # attribute: option() + # attribute: ('__init__ arg', 'config variable') + # attribute: ('config variable,') + # attribute: 'config variable' + result = {} + for internal, external in options.items(): + if not isinstance(external, option): + if not isinstance(external, (list, tuple)): + external = (external,) + external = option(*external) + result[internal] = external + return result + + +class Filter(object): + """Base class for a filter. + + Subclasses should allow the creation of an instance without any + arguments, i.e. no required arguments for __init__(), so that the + filter can be specified by name only. In fact, the taking of + arguments will normally be the exception. + """ + + # Name by which this filter can be referred to. + name = None + + # Options the filter supports. The base class will ensure that + # these are both accepted by __init__ as kwargs, and may also be + # defined in the environment config, or the OS environment (i.e. + # a setup() implementation will be generated which uses + # get_config() calls). 
+ # + # Can look like this: + # options = { + # 'binary': 'COMPASS_BINARY', + # 'plugins': option('COMPASS_PLUGINS', type=list), + # } + options = {} + + # The maximum debug level under which this filter should run. + # Most filters only run in production mode (debug=False), so this is the + # default value. However, a filter like ``cssrewrite`` needs to run in + # ``merge`` mode. Further, compiler-type filters (like less/sass) would + # say ``None``, indicating that they have to run **always**. + # There is an interesting and convenient twist here: If you use such a + # filter, the bundle will automatically be merged, even in debug mode. + # It couldn't work any other way of course, the output needs to be written + # somewhere. If you have other files that do not need compiling, and you + # don't want them pulled into the merge, you can use a nested bundle with + # it's own output target just for those files that need the compilation. + max_debug_level = False + + def __init__(self, **kwargs): + self.ctx = None + self._options = parse_options(self.__class__.options) + + # Resolve options given directly to the filter. This + # allows creating filter instances with options that + # deviate from the global default. + # TODO: can the metaclass generate a init signature? + for attribute, (initarg, _, _) in self._options.items(): + arg = initarg if initarg is not None else attribute + if arg in kwargs: + setattr(self, attribute, kwargs.pop(arg)) + else: + setattr(self, attribute, None) + if kwargs: + raise TypeError('got an unexpected keyword argument: %s' % + list(kwargs.keys())[0]) + + def __eq__(self, other): + if isinstance(other, Filter): + return self.id() == other.id() + return NotImplemented + + def set_context(self, ctx): + """This is called before the filter is used.""" + self.ctx = ctx + + def get_config(self, setting=False, env=None, require=True, + what='dependency', type=None): + """Helper function that subclasses can use if they have + dependencies which they cannot automatically resolve, like + an external binary. + + Using this function will give the user the ability to resolve + these dependencies in a common way through either a Django + setting, or an environment variable. + + You may specify different names for ``setting`` and ``env``. + If only the former is given, the latter is considered to use + the same name. If either argument is ``False``, the respective + source is not used. + + By default, if the value is not found, an error is raised. If + ``required`` is ``False``, then ``None`` is returned instead. + + ``what`` is a string that is used in the exception message; + you can use it to give the user an idea what he is lacking, + i.e. 'xyz filter binary'. + + Specifying values via the OS environment is obviously limited. If + you are expecting a special type, you may set the ``type`` argument + and a value from the OS environment will be parsed into that type. + Currently only ``list`` is supported. + """ + assert type in (None, list), "%s not supported for type" % type + + if env is None: + env = setting + + assert setting or env + + value = None + if not setting is False: + value = self.ctx.get(setting, None) + + if value is None and not env is False: + value = os.environ.get(env) + if value is not None: + if not six.PY3: + # TODO: What charset should we use? What does Python 3 use? + value = value.decode('utf8') + if type == list: + value = smartsplit(value, ',') + + if value is None and require: + err_msg = '%s was not found. 
Define a ' % what + options = [] + if setting: + options.append('%s setting' % setting) + if env: + options.append('%s environment variable' % env) + err_msg += ' or '.join(options) + raise EnvironmentError(err_msg) + return value + + def unique(self): + """This function is used to determine if two filter instances + represent the same filter and can be merged. Only one of the + filters will be applied. + + If your filter takes options, you might want to override this + and return a hashable object containing all the data unique + to your current instance. This will allow your filter to be applied + multiple times with differing values for those options. + """ + return False + + def id(self): + """Unique identifier for the filter instance. + + Among other things, this is used as part of the caching key. + It should therefore not depend on instance data, but yield + the same result across multiple python invocations. + """ + # freezedicts() allows filters to return dict objects as part + # of unique(), which are not per-se supported by hash(). + return hash_func((self.name, freezedicts(self.unique()),)) + + def setup(self): + """Overwrite this to have the filter do initial setup work, + like determining whether required modules are available etc. + + Since this will only be called when the user actually + attempts to use the filter, you can raise an error here if + dependencies are not matched. + + Note: In most cases, it should be enough to simply define + the ``options`` attribute. If you override this method and + want to use options as well, don't forget to call super(). + + Note: This may be called multiple times if one filter instance + is used with different asset environment instances. + """ + for attribute, (_, configvar, type) in self._options.items(): + if not configvar: + continue + if getattr(self, attribute) is None: + # No value specified for this filter instance , + # specifically attempt to load it from the environment. + setattr(self, attribute, + self.get_config(setting=configvar, require=False, + type=type)) + + def input(self, _in, out, **kw): + """Implement your actual filter here. + + This will be called for every source file. + """ + + def output(self, _in, out, **kw): + """Implement your actual filter here. + + This will be called for every output file. + """ + + def open(self, out, source_path, **kw): + """Implement your actual filter here. + + This is like input(), but only one filter may provide this. + Use this if your filter needs to read from the source file + directly, and would ignore any processing by earlier filters. + """ + + def concat(self, out, hunks, **kw): + """Implement your actual filter here. + + Will be called once between the input() and output() + steps, and should concat all the source files (given as hunks) + together, writing the result to the ``out`` stream. + + Only one such filter is allowed. + """ + + def get_additional_cache_keys(self, **kw): + """Additional cache keys dependent on keyword arguments. + + If your filter's output is dependent on some or all of the + keyword arguments, you can return these arguments here as a list. + This will make sure the caching behavior is correct. + + For example, the CSSRewrite filter depends not only on the + contents of the file it applies to, but also the output path + of the final file. 
If the CSSRewrite filter doesn't correctly + override this method, a certain output file with a certain base + directory might potentially get a CSSRewriten file from cache + that is meant for an output file in a different base directory. + """ + + return [] + + # We just declared those for demonstration purposes + del input + del output + del open + del concat + + +class CallableFilter(Filter): + """Helper class that create a simple filter wrapping around + callable. + """ + + def __init__(self, callable): + super(CallableFilter, self).__init__() + self.callable = callable + + def unique(self): + # XXX This means the cache will never work for those filters. + # This is actually a deeper problem: Originally unique() was + # used to remove duplicate filters. Now it is also for the cache + # key. The latter would benefit from ALL the filter's options being + # included. Possibly this might just be what we should do, at the + # expense of the "remove duplicates" functionality (because it + # is never really needed anyway). It's also illdefined when a filter + # should be a removable duplicate - most options probably SHOULD make + # a filter no longer being considered duplicate. + return self.callable + + def output(self, _in, out, **kw): + return self.callable(_in, out) + + +class ExternalToolMetaclass(type): + def __new__(cls, name, bases, attrs): + # First, determine the method defined for this very class. We + # need to pop the ``method`` attribute from ``attrs``, so that we + # create the class without the argument; allowing us then to look + # at a ``method`` attribute that parents may have defined. + # + # method defaults to 'output' if argv is set, to "implement + # no default method" without an argv. + if not 'method' in attrs and 'argv' in attrs: + chosen = 'output' + else: + chosen = attrs.pop('method', False) + + # Create the class first, since this helps us look at any + # method attributes defined in the parent hierarchy. + klass = type.__new__(cls, name, bases, attrs) + parent_method = getattr(klass, 'method', None) + + # Assign the method argument that we initially popped again. + klass.method = chosen + + try: + # Don't do anything for this class itself + ExternalTool + except NameError: + return klass + + # If the class already has a method attribute, this indicates + # that a parent class already dealt with it and enabled/disabled + # the methods, and we won't again. + if parent_method is not None: + return klass + + methods = ('output', 'input', 'open') + + if chosen is not None: + assert not chosen or chosen in methods, \ + '%s not a supported filter method' % chosen + # Disable those methods not chosen. + for m in methods: + if m != chosen: + # setdefault = Don't override actual methods the + # class has in fact provided itself. + if not m in klass.__dict__: + setattr(klass, m, None) + + return klass + + +class ExternalTool(six.with_metaclass(ExternalToolMetaclass, Filter)): + """Subclass that helps creating filters that need to run an external + program. + + You are encouraged to use this when possible, as it helps consistency. + + In the simplest possible case, subclasses only have to define one or more + of the following attributes, without needing to write any code: + + ``argv`` + The command line that will be passed to subprocess.Popen. 
New-style + format strings can be used to access all kinds of data: The arguments + to the filter method, as well as the filter instance via ``self``: + + argv = ['{self.binary}', '--input', '{source_path}', '--cwd', + '{self.env.directory}'] + + ``method`` + The filter method to implement. One of ``input``, ``output`` or + ``open``. + """ + + argv = [] + method = None + + def open(self, out, source_path, **kw): + self._evaluate([out, source_path], kw, out) + + def input(self, _in, out, **kw): + self._evaluate([_in, out], kw, out, _in) + + def output(self, _in, out, **kw): + self._evaluate([_in, out], kw, out, _in) + + def _evaluate(self, args, kwargs, out, data=None): + # For now, still support Python 2.5, but the format strings in argv + # are not supported (making the feature mostly useless). For this + # reason none of the builtin filters is using argv currently. + if hasattr(str, 'format'): + # Add 'self' to the keywords available in format strings + kwargs = kwargs.copy() + kwargs.update({'self': self}) + + # Resolve all the format strings in argv + def replace(arg): + try: + return arg.format(*args, **kwargs) + except KeyError as e: + # Treat "output" and "input" variables special, they + # are dealt with in :meth:`subprocess` instead. + if e.args[0] not in ('input', 'output'): + raise + return arg + argv = list(map(replace, self.argv)) + else: + argv = self.argv + self.subprocess(argv, out, data=data) + + @classmethod + def subprocess(cls, argv, out, data=None, cwd=None): + """Execute the commandline given by the list in ``argv``. + + If a byestring is given via ``data``, it is piped into data. + + If ``cwd`` is not None, the process will be executed in that directory. + + ``argv`` may contain two placeholders: + + ``{input}`` + If given, ``data`` will be written to a temporary file instead + of data. The placeholder is then replaced with that file. + + ``{output}`` + Will be replaced by a temporary filename. The return value then + will be the content of this file, rather than stdout. + """ + + class tempfile_on_demand(object): + def __repr__(self): + if not hasattr(self, 'filename'): + fd, self.filename = tempfile.mkstemp() + os.close(fd) + return self.filename + + @property + def created(self): + return hasattr(self, 'filename') + + # Replace input and output placeholders + input_file = tempfile_on_demand() + output_file = tempfile_on_demand() + if hasattr(str, 'format'): # Support Python 2.5 without the feature + argv = list(map(lambda item: + item.format(input=input_file, output=output_file), argv)) + + try: + data = (data.read() if hasattr(data, 'read') else data) + if data is not None: + data = data.encode('utf-8') + + if input_file.created: + if data is None: + raise ValueError( + '{input} placeholder given, but no data passed') + with open(input_file.filename, 'wb') as f: + f.write(data) + # No longer pass to stdin + data = None + try: + proc = subprocess.Popen( + argv, + # we cannot use the in/out streams directly, as they might be + # StringIO objects (which are not supported by subprocess) + stdout=subprocess.PIPE, + stdin=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=cwd, + shell=os.name == 'nt') + except OSError: + raise FilterError('Program file not found: %s.' 
% argv[0]) + stdout, stderr = proc.communicate(data) + if proc.returncode: + raise FilterError( + '%s: subprocess returned a non-success result code: ' + '%s, stdout=%s, stderr=%s' % ( + cls.name or cls.__name__, + proc.returncode, + stdout.decode('utf-8').strip(), + stderr.decode('utf-8').strip())) + else: + if output_file.created: + with open(output_file.filename, 'rb') as f: + out.write(f.read().decode('utf-8')) + else: + if isinstance(stdout, bytes): + out.write(stdout.decode('utf-8')) + else: + out.write(stdout) + finally: + if output_file.created: + os.unlink(output_file.filename) + if input_file.created: + os.unlink(input_file.filename) + + @classmethod + def parse_binary(cls, string): + r""" + Parse a string for a binary (executable). Allow multiple arguments + to indicate the binary (as parsed by shlex). + + Return a list of arguments suitable for passing to subprocess + functions. + + >>> ExternalTool.parse_binary('/usr/bin/lessc') + ['/usr/bin/lessc'] + + >>> ExternalTool.parse_binary('node node_modules/bin/lessc') + ['node', 'node_modules/bin/lessc'] + + >>> ExternalTool.parse_binary('"binary with spaces"') + ['binary with spaces'] + + >>> ExternalTool.parse_binary(r'binary\ with\ spaces') + ['binary with spaces'] + + >>> ExternalTool.parse_binary('') + [] + """ + return shlex.split(string) + + +class JavaTool(ExternalTool): + """Helper class for filters which are implemented as Java ARchives (JARs). + + The subclass is expected to define a ``jar`` attribute in :meth:`setup`. + + If the ``argv`` definition is used, it is expected to contain only the + arguments to be passed to the Java tool. The path to the java binary and + the jar file are added by the base class. + """ + + method = None + + def setup(self): + super(JavaTool, self).setup() + + # We can reasonably expect that java is just on the path, so + # don't require it, but hope for the best. + path = self.get_config(env='JAVA_HOME', require=False) + if path is not None: + self.java_bin = os.path.join(path, 'bin/java') + else: + self.java_bin = 'java' + + def subprocess(self, args, out, data=None): + ExternalTool.subprocess( + [self.java_bin, '-jar', self.jar] + args, out, data) + + +_FILTERS = {} + + +def register_filter(f): + """Add the given filter to the list of know filters. + """ + if not issubclass(f, Filter): + raise ValueError("Must be a subclass of 'Filter'") + if not f.name: + raise ValueError('Must have a name') + _FILTERS[f.name] = f + + +def get_filter(f, *args, **kwargs): + """Resolves ``f`` to a filter instance. + + Different ways of specifying a filter are supported, for example by + giving the class, or a filter name. + + *args and **kwargs are passed along to the filter when it's + instantiated. + """ + if isinstance(f, Filter): + # Don't need to do anything. + assert not args and not kwargs + return f + elif isinstance(f, six.string_types): + if f in _FILTERS: + klass = _FILTERS[f] + else: + raise ValueError('No filter \'%s\'' % f) + elif inspect.isclass(f) and issubclass(f, Filter): + klass = f + elif callable(f): + assert not args and not kwargs + return CallableFilter(f) + else: + raise ValueError('Unable to resolve to a filter: %s' % f) + + return klass(*args, **kwargs) + +CODE_FILES = ['.py', '.pyc', '.so'] + + +def is_module(name): + """Is this a recognized module type? + + Does this name end in one of the recognized CODE_FILES extensions? + + The file is assumed to exist, as unique_modules has found it using + an os.listdir() call. 
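+
+    For example (editor's illustration)::
+
+        is_module('jsmin.py')   # -> 'jsmin'
+        is_module('README.md')  # -> None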
+ + returns the name with the extension stripped (the module name) or + None if the name does not appear to be a module + """ + for ext in CODE_FILES: + if name.endswith(ext): + return name[:-len(ext)] + + +def is_package(directory): + """Is the (fully qualified) directory a python package? + + """ + for ext in ['.py', '.pyc']: + if os.path.exists(os.path.join(directory, '__init__'+ext)): + return True + + +def unique_modules(directory): + """Find all unique module names within a directory + + For each entry in the directory, check if it is a source + code file-type (using is_code(entry)), or a directory with + a source-code file-type at entry/__init__.py[c]? + + Filter the results to only produce a single entry for each + module name. + + Filter the results to not include '_' prefixed names. + + yields each entry as it is encountered + """ + found = {} + for entry in sorted(os.listdir(directory)): + if entry.startswith('_'): + continue + module = is_module(entry) + if module: + if module not in found: + found[module] = entry + yield module + elif is_package(os.path.join(directory, entry)): + if entry not in found: + found[entry] = entry + yield entry + + +def load_builtin_filters(): + from os import path + import warnings + + # load modules to work based with and without pyinstaller + # from: https://github.com/webcomics/dosage/blob/master/dosagelib/loader.py + # see: https://github.com/pyinstaller/pyinstaller/issues/1905 + + # load modules using iter_modules() + # (should find all filters in normal build, but not pyinstaller) + prefix = __name__ + '.' + module_names = [m[1] for m in pkgutil.iter_modules(__path__, prefix)] + + # special handling for PyInstaller + importers = map(pkgutil.get_importer, __path__) + toc = set() + for i in importers: + if hasattr(i, 'toc'): + toc |= i.toc + for elm in toc: + if elm.startswith(prefix): + module_names.append(elm) + + for module_name in module_names: + #module_name = 'webassets.filter.%s' % name + try: + module = import_module(module_name) + except Exception as e: + warnings.warn('Error while loading builtin filter ' + 'module \'%s\': %s' % (module_name, e)) + else: + for attr_name in dir(module): + attr = getattr(module, attr_name) + if inspect.isclass(attr) and issubclass(attr, Filter): + if not attr.name: + # Skip if filter has no name; those are + # considered abstract base classes. + continue + register_filter(attr) +load_builtin_filters() diff --git a/pelican/plugins/webassets/vendor/webassets/filter/autoprefixer.py b/pelican/plugins/webassets/vendor/webassets/filter/autoprefixer.py new file mode 100644 index 0000000..a406c30 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/autoprefixer.py @@ -0,0 +1,85 @@ +from __future__ import with_statement + +from webassets.filter import ExternalTool +from webassets.utils import working_directory + + +class AutoprefixerFilter(ExternalTool): + """Prefixes vendor-prefixes using `autoprefixer + `, which uses the `Can I Use? + ` database to know which prefixes need to be + inserted. + + This depends on the `autoprefixer ` + command line tool being installed (use ``npm install autoprefixer``). + + *Supported configuration options*: + + AUTOPREFIXER_BIN + Path to the autoprefixer executable used to compile source files. By + default, the filter will attempt to run ``autoprefixer`` via the + system path. + + AUTOPREFIXER_BROWSERS + The browser expressions to use. This corresponds to the ``--browsers + `` flag, see the `--browsers documentation + `. 
By default, this flag + won't be passed, and autoprefixer's default will be used. + + Example:: + + AUTOPREFIXER_BROWSERS = ['> 1%', 'last 2 versions', 'firefox 24', 'opera 12.1'] + + AUTOPREFIXER_EXTRA_ARGS + Additional options may be passed to ``autoprefixer`` using this + setting, which expects a list of strings. + + """ + name = 'autoprefixer' + options = { + 'autoprefixer': 'AUTOPREFIXER_BIN', + 'browsers': 'AUTOPREFIXER_BROWSERS', + 'extra_args': 'AUTOPREFIXER_EXTRA_ARGS', + } + + max_debug_level = None + + def input(self, in_, out, source_path, **kw): + # Set working directory to the source file so that includes are found + args = [self.autoprefixer or 'autoprefixer'] + if self.browsers: + if isinstance(self.browsers, (list, tuple)): + self.browsers = u','.join(self.browsers) + args.extend(['--browsers', self.browsers]) + if self.extra_args: + args.extend(self.extra_args) + with working_directory(filename=source_path): + self.subprocess(args, out, in_) + + +class Autoprefixer6Filter(AutoprefixerFilter): + name = 'autoprefixer6' + + options = { + 'autoprefixer': 'AUTOPREFIXER_BIN', + 'browsers': 'AUTOPREFIXER_BROWSERS', + 'extra_args': 'AUTOPREFIXER_EXTRA_ARGS', + } + + _postcss_autoprefixer = ['-u', 'autoprefixer'] + + max_debug_level = None + + def input(self, in_, out, source_path, **kw): + # Set working directory to the source file so that includes are found + args = [self.autoprefixer or 'postcss'] + args.extend(self._postcss_autoprefixer) + + if self.browsers: + if isinstance(self.browsers, (list, tuple)): + self.browsers = u','.join(self.browsers) + args.extend(['--autoprefixer.browsers', self.browsers]) + if self.extra_args: + args.extend(self.extra_args) + with working_directory(filename=source_path): + self.subprocess(args, out, in_) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/babel.py b/pelican/plugins/webassets/vendor/webassets/filter/babel.py new file mode 100755 index 0000000..9b36e13 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/babel.py @@ -0,0 +1,77 @@ +from webassets.filter import ExternalTool + + +class Babel(ExternalTool): + """Processes ES6+ code into ES5 friendly code using `Babel `_. + + Requires the babel executable to be available externally. + To install it, you might be able to do:: + + $ npm install --global babel-cli + + You probably also want some presets:: + + $ npm install --global babel-preset-es2015 + + Example python bundle: + + .. code-block:: python + + es2015 = get_filter('babel', presets='es2015') + bundle = Bundle('**/*.js', filters=es2015) + + Example YAML bundle: + + .. code-block:: yaml + + es5-bundle: + output: dist/es5.js + config: + BABEL_PRESETS: es2015 + filters: babel + contents: + - file1.js + - file2.js + + Supported configuration options: + + BABEL_BIN + The path to the babel binary. If not set the filter will try to run + ``babel`` as if it's in the system path. + + BABEL_PRESETS + Passed straight through to ``babel --presets`` to specify which babel + presets to use + + BABEL_EXTRA_ARGS + A list of manual arguments to be specified to the babel command + + BABEL_RUN_IN_DEBUG + May be set to False to make babel not run in debug + """ + name = 'babel' + max_debug_level = None + + options = { + 'binary': 'BABEL_BIN', + 'presets': 'BABEL_PRESETS', + 'extra_args': 'BABEL_EXTRA_ARGS', + 'run_in_debug': 'BABEL_RUN_IN_DEBUG', + } + + def setup(self): + super(Babel, self).setup() + if self.run_in_debug is False: + # Disable running in debug mode for this instance. 
+ self.max_debug_level = False + + def input(self, _in, out, **kw): + args = [self.binary or 'babel'] + if self.presets: + args += ['--presets', self.presets] + if self.extra_args: + args.extend(self.extra_args) + if 'source_path' in kw: + args.extend(['--filename', kw['source_path']]) + return self.subprocess(args, out, _in) + diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cleancss.py b/pelican/plugins/webassets/vendor/webassets/filter/cleancss.py new file mode 100644 index 0000000..f55516c --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/cleancss.py @@ -0,0 +1,49 @@ +import os +from subprocess import PIPE, Popen + +from webassets.filter import ExternalTool + +__all__ = ('CleanCSS',) + + +class CleanCSS(ExternalTool): + """ + Minify css using `Clean-css `_. + + Clean-css is an external tool written for NodeJS; this filter assumes that + the ``cleancss`` executable is in the path. Otherwise, you may define + a ``CLEANCSS_BIN`` setting. + + Additional options may be passed to ``cleancss`` binary using the setting + ``CLEANCSS_EXTRA_ARGS``, which expects a list of strings. + """ + + name = 'cleancss' + options = { + 'binary': 'CLEANCSS_BIN', + 'extra_args': 'CLEANCSS_EXTRA_ARGS', + } + + @property + def cleancss_ver(self): + if not hasattr(self, '_cleancss_ver'): + args = [self.binary or 'cleancss'] + args += ['--version'] + # out = b"MAJOR.MINOR.REVISION" // b"3.4.19" or b"4.0.0" + out, err = Popen(args, stdout=PIPE).communicate() + self._cleancss_ver = int(out[:out.index(b'.')]) + return self._cleancss_ver + + def output(self, _in, out, **kw): + args = [self.binary or 'cleancss'] + if self.extra_args: + args.extend(self.extra_args) + self.subprocess(args, out, _in) + + def input(self, _in, out, **kw): + args = [self.binary or 'cleancss'] + if self.cleancss_ver < 4: + args += ['--root', os.path.dirname(kw['source_path'])] + if self.extra_args: + args.extend(self.extra_args) + self.subprocess(args, out, _in) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/clevercss.py b/pelican/plugins/webassets/vendor/webassets/filter/clevercss.py new file mode 100644 index 0000000..37ef78e --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/clevercss.py @@ -0,0 +1,24 @@ +from __future__ import absolute_import +from webassets.filter import Filter + + +__all__ = ('CleverCSS',) + + +class CleverCSS(Filter): + """Converts `CleverCSS `_ markup + to real CSS. + + If you want to combine it with other CSS filters, make sure this one + runs first. + """ + + name = 'clevercss' + max_debug_level = None + + def setup(self): + import clevercss + self.clevercss = clevercss + + def output(self, _in, out, **kw): + out.write(self.clevercss.convert(_in.read())) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/closure.py b/pelican/plugins/webassets/vendor/webassets/filter/closure.py new file mode 100644 index 0000000..76435e2 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/closure.py @@ -0,0 +1,75 @@ +"""Minify Javascript with `Google Closure Compiler +`_. + +Google Closure Compiler is an external tool written in Java, which needs +to be available. One way to get it is to install the +`closure `_ package:: + + pip install closure + +No configuration is necessary in this case. + +You can also define a ``CLOSURE_COMPRESSOR_PATH`` setting that +points to the ``.jar`` file. Otherwise, an environment variable by +the same name is tried. 
The filter will also look for a ``JAVA_HOME`` +environment variable to run the ``.jar`` file, or will otherwise +assume that ``java`` is on the system path. + +Supported configuration options: + +CLOSURE_COMPRESSOR_OPTIMIZATION + Corresponds to Google Closure's `compilation level parameter + `_. + +CLOSURE_EXTRA_ARGS + A list of further options to be passed to the Closure compiler. + There are a lot of them. + + For options which take values you want to use two items in the list:: + + ['--output_wrapper', 'foo: %output%'] +""" + +from __future__ import absolute_import +from webassets.filter import JavaTool + + +__all__ = ('ClosureJS',) + + +class ClosureJS(JavaTool): + + name = 'closure_js' + options = { + 'opt': 'CLOSURE_COMPRESSOR_OPTIMIZATION', + 'extra_args': 'CLOSURE_EXTRA_ARGS', + } + + def setup(self): + super().setup() + self.jar = self.get_jar() + + def get_jar(self): + try: + return self.get_config('CLOSURE_COMPRESSOR_PATH', + what='Google Closure Compiler') + except EnvironmentError: + try: + import closure + return closure.get_jar_filename() + except ImportError: + raise EnvironmentError( + "\nClosure Compiler jar can't be found." + "\nPlease either install the closure package:" + "\n\n pip install closure\n" + "\nor provide a CLOSURE_COMPRESSOR_PATH setting " + "or an environment variable with the full path to " + "the Closure compiler jar." + ) + + def output(self, _in, out, **kw): + args = ['--charset', 'UTF-8', + '--compilation_level', self.opt or 'WHITESPACE_ONLY'] + if self.extra_args: + args.extend(self.extra_args) + self.subprocess(args, out, _in) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/closure_stylesheets.py b/pelican/plugins/webassets/vendor/webassets/filter/closure_stylesheets.py new file mode 100644 index 0000000..1b8a4e1 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/closure_stylesheets.py @@ -0,0 +1,50 @@ +""" Compile and Minify CSS with `Google Closure Stylesheets +`_. + +Google Closure Templates is an external tool written in Java, which needs +to be obtained separately. + +You must define a ``CLOSURE_STYLESHEETS_PATH`` setting that +points to the ``.jar`` file. Otherwise, an environment variable by +the same name is tried. The filter will also look for a ``JAVA_HOME`` +environment variable to run the ``.jar`` file, or will otherwise +assume that ``java`` is on the system path. +""" + +from webassets.filter import JavaTool + + +__all__ = ['ClosureStylesheetsCompiler', 'ClosureStylesheetsMinifier'] + + +class ClosureStylesheetsBase(JavaTool): + + def setup(self): + super(ClosureStylesheetsBase, self).setup() + try: + self.jar = self.get_config('CLOSURE_STYLESHEETS_PATH', + what='Google Closure Stylesheets tool') + except EnvironmentError: + raise EnvironmentError( + "\nGoogle Closure Stylesheets jar can't be found." + "\nPlease provide a CLOSURE_STYLESHEETS_PATH setting " + "or an environment variable with the full path to " + "the Google Closure Stylesheets jar." 
+ ) + + def output(self, _in, out, **kw): + params = [] + if self.mode != 'minify': + params.append('--pretty-print') + self.subprocess( + params + ['{input}'], out, _in) + + +class ClosureStylesheetsCompiler(ClosureStylesheetsBase): + name = 'closure_stylesheets_compiler' + mode = 'compile' + + +class ClosureStylesheetsMinifier(ClosureStylesheetsBase): + name = 'closure_stylesheets_minifier' + mode = 'minify' diff --git a/pelican/plugins/webassets/vendor/webassets/filter/closure_templates.py b/pelican/plugins/webassets/vendor/webassets/filter/closure_templates.py new file mode 100644 index 0000000..b2a15fa --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/closure_templates.py @@ -0,0 +1,100 @@ +"""Client Side Templating with `Google Closure Templates +`_. + +Google Closure Templates is an external tool written in Java, which needs +to be available. One way to get it is to install the +`closure-soy `_ package:: + + pip install closure-soy + +No configuration is necessary in this case. + +You can also define a ``CLOSURE_TEMPLATES_PATH`` setting that +points to the ``.jar`` file. Otherwise, an environment variable by +the same name is tried. The filter will also look for a ``JAVA_HOME`` +environment variable to run the ``.jar`` file, or will otherwise +assume that ``java`` is on the system path. + +Supported configuration options: + +CLOSURE_EXTRA_ARGS + A list of further options to be passed to the Closure compiler. + There are a lot of them. + + For options which take values you want to use two items in the list:: + + ['--inputPrefix', 'prefix'] +""" + +import subprocess +import os +import tempfile + +from webassets.exceptions import FilterError +from webassets.filter.jst import JSTemplateFilter + + +__all__ = ('ClosureTemplateFilter',) + + +class ClosureTemplateFilter(JSTemplateFilter): + name = 'closure_tmpl' + options = { + 'extra_args': 'CLOSURE_EXTRA_ARGS', + } + + def process_templates(self, out, hunks, **kw): + templates = [info['source_path'] for _, info in hunks] + + temp = tempfile.NamedTemporaryFile(dir='.', delete=True) + args = ["--outputPathFormat", temp.name, '--srcs'] + args.extend(templates) + if self.extra_args: + args.extend(self.extra_args) + self.java_run(args) + out.write(open(temp.name).read()) + + def setup(self): + super(ClosureTemplateFilter, self).setup() + try: + self.jar = self.get_config('CLOSURE_TEMPLATES_PATH', + what='Google Closure Soy Templates Compiler') + except EnvironmentError: + try: + import closure_soy + self.jar = closure_soy.get_jar_filename() + except ImportError: + raise EnvironmentError( + "\nClosure Templates jar can't be found." + "\nPlease either install the closure package:" + "\n\n pip install closure-soy\n" + "\nor provide a CLOSURE_TEMPLATES_PATH setting " + "or an environment variable with the full path to " + "the Closure compiler jar." + ) + self.java_setup() + super(ClosureTemplateFilter, self).setup() + + def java_setup(self): + # We can reasonably expect that java is just on the path, so + # don't require it, but hope for the best. 
+ path = self.get_config(env='JAVA_HOME', require=False) + if path is not None: + self.java = os.path.join(path, 'bin/java') + else: + self.java = 'java' + + def java_run(self, args): + proc = subprocess.Popen( + [self.java, '-jar', self.jar] + args, + # we cannot use the in/out streams directly, as they might be + # StringIO objects (which are not supported by subprocess) + stdout=subprocess.PIPE, + stdin=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=(os.name == 'nt')) + stdout, stderr = proc.communicate() + if proc.returncode: + raise FilterError('%s: subprocess returned a ' + 'non-success result code: %s, stdout=%s, stderr=%s' % ( + self.name, proc.returncode, stdout, stderr)) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/coffeescript.py b/pelican/plugins/webassets/vendor/webassets/filter/coffeescript.py new file mode 100644 index 0000000..9ff075d --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/coffeescript.py @@ -0,0 +1,62 @@ +from __future__ import print_function +import os, subprocess + +from webassets.filter import Filter +from webassets.exceptions import FilterError, ImminentDeprecationWarning + + +__all__ = ('CoffeeScript',) + + +class CoffeeScript(Filter): + """Converts `CoffeeScript `_ + to real JavaScript. + + If you want to combine it with other JavaScript filters, make sure this + one runs first. + + Supported configuration options: + + COFFEE_NO_BARE + Set to ``True`` to compile with the top-level function + wrapper (suppresses the --bare option to ``coffee``, which + is used by default). + """ + + name = 'coffeescript' + max_debug_level = None + options = { + 'coffee_deprecated': (False, 'COFFEE_PATH'), + 'coffee_bin': ('binary', 'COFFEE_BIN'), + 'no_bare': 'COFFEE_NO_BARE', + } + + def output(self, _in, out, **kw): + binary = self.coffee_bin or self.coffee_deprecated or 'coffee' + if self.coffee_deprecated: + import warnings + warnings.warn( + 'The COFFEE_PATH option of the "coffeescript" ' + +'filter has been deprecated and will be removed.' + +'Use COFFEE_BIN instead.', ImminentDeprecationWarning) + + args = "-sp" + ("" if self.no_bare else 'b') + try: + proc = subprocess.Popen([binary, args], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=(os.name == 'nt')) + except OSError as e: + if e.errno == 2: + raise Exception("coffeescript not installed or in system path for webassets") + raise + stdout, stderr = proc.communicate(_in.read().encode('utf-8')) + if proc.returncode != 0: + raise FilterError(('coffeescript: subprocess had error: stderr=%s, '+ + 'stdout=%s, returncode=%s') % ( + stderr, stdout, proc.returncode)) + elif stderr: + print("coffeescript filter has warnings:", stderr) + out.write(stdout.decode('utf-8')) + diff --git a/pelican/plugins/webassets/vendor/webassets/filter/compass.py b/pelican/plugins/webassets/vendor/webassets/filter/compass.py new file mode 100644 index 0000000..256544f --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/compass.py @@ -0,0 +1,255 @@ +""" +Generally speaking, compass provides a command line util that is used + a) as a management script (like django-admin.py) doing for example + setup work, adding plugins to a project etc), and + b) can compile the sass source files into CSS. + +While generally project-based, starting with 0.10, compass supposedly +supports compiling individual files, which is what we are using for +implementing this filter. Supposedly, because there are numerous issues +that require working around. 
See the comments in the actual filter code +for the full story on all the hoops be have to jump through. + +An alternative option would be to use Sass to compile. Compass essentially +adds two things on top of sass: A bunch of CSS frameworks, ported to Sass, +and available for including. And various ruby helpers that these frameworks +and custom Sass files can use. Apparently there is supposed to be a way +to compile a compass project through sass, but so far, I haven't got it +to work. The syntax is supposed to be one of: + + $ sass -r compass `compass imports` FILE + $ sass --compass FILE + +See: + http://groups.google.com/group/compass-users/browse_thread/thread/a476dfcd2b47653e + http://groups.google.com/group/compass-users/browse_thread/thread/072bd8b51bec5f7c + http://groups.google.com/group/compass-users/browse_thread/thread/daf55acda03656d1 +""" + +import os +from os import path +import tempfile +import shutil +import subprocess +from io import open +from webassets import six + +from webassets.exceptions import FilterError +from webassets.filter import Filter, option + + +__all__ = ('Compass',) + + +class CompassConfig(dict): + """A trivial dict wrapper that can generate a Compass config file.""" + + def to_string(self): + def string_rep(val): + """ Determine the correct string rep for the config file """ + if isinstance(val, bool): + # True -> true and False -> false + return six.text_type(val).lower() + elif isinstance(val, six.string_types) and val.startswith(':'): + # ruby symbols, like :nested, used for "output_style" + return six.text_type(val) + elif isinstance(val, dict): + # ruby hashes, for "sass_options" for example + return u'{%s}' % ', '.join("'%s' => '%s'" % i for i in val.items()) + elif isinstance(val, tuple): + val = list(val) + # works fine with strings and lists + return repr(val) + return u'\n'.join(['%s = %s' % (k, string_rep(v)) for k, v in self.items()]) + + +class Compass(Filter): + """Converts `Compass `_ .sass files to + CSS. + + Requires at least version 0.10. + + To compile a standard Compass project, you only need to have + to compile your main ``screen.sass``, ``print.sass`` and ``ie.sass`` + files. All the partials that you include will be handled by Compass. + + If you want to combine the filter with other CSS filters, make + sure this one runs first. + + Supported configuration options: + + COMPASS_BIN + The path to the Compass binary. If not set, the filter will + try to run ``compass`` as if it's in the system path. + + COMPASS_PLUGINS + Compass plugins to use. This is equivalent to the ``--require`` + command line option of the Compass. and expects a Python list + object of Ruby libraries to load. + + COMPASS_CONFIG + An optional dictionary of Compass `configuration options + `_. + The values are emitted as strings, and paths are relative to the + Environment's ``directory`` by default; include a ``project_path`` + entry to override this. + + The ``sourcemap`` option has a caveat. A file called _.css.map is + created by Compass in the tempdir (where _.scss is the original asset), + which is then moved into the output_path directory. Since the tempdir + is created one level down from the output path, the relative links in + the sourcemap should correctly map. This file, however, will not be + versioned, and thus this option should ideally only be used locally + for development and not in production with a caching service as the + _.css.map file will not be invalidated. 
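+
+        As an illustration only (the option values shown are hypothetical),
+        a configuration that enables sourcemaps and compressed output could
+        look like::
+
+            COMPASS_CONFIG = {
+                'output_style': ':compressed',
+                'sourcemap': True,
+            }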
+ """ + + name = 'compass' + max_debug_level = None + options = { + 'compass': ('binary', 'COMPASS_BIN'), + 'plugins': option('COMPASS_PLUGINS', type=list), + 'config': 'COMPASS_CONFIG', + } + + def open(self, out, source_path, **kw): + """Compass currently doesn't take data from stdin, and doesn't allow + us accessing the result from stdout either. + + Also, there's a bunch of other issues we need to work around: + + - compass doesn't support given an explicit output file, only a + "--css-dir" output directory. + + We have to "guess" the filename that will be created in that + directory. + + - The output filename used is based on the input filename, and + simply cutting of the length of the "sass_dir" (and changing + the file extension). That is, compass expects the input + filename to always be inside the "sass_dir" (which defaults to + ./src), and if this is not the case, the output filename will + be gibberish (missing characters in front). See: + https://github.com/chriseppstein/compass/issues/304 + + We fix this by setting the proper --sass-dir option. + + - Compass insists on creating a .sass-cache folder in the + current working directory, and unlike the sass executable, + there doesn't seem to be a way to disable it. + + The workaround is to set the working directory to our temp + directory, so that the cache folder will be deleted at the end. + """ + + # Create temp folder one dir below output_path so sources in + # sourcemap are correct. This will be in the project folder, + # and as such, while exteremly unlikely, this could interfere + # with existing files and directories. + tempout_dir = path.normpath( + path.join(path.dirname(kw['output_path']), '../') + ) + tempout = tempfile.mkdtemp(dir=tempout_dir) + # Temporarily move to "tempout", so .sass-cache will be created there + old_wd = os.getcwd() + os.chdir(tempout) + try: + # Make sure to use normpath() to not cause trouble with + # compass' simplistic path handling, where it just assumes + # source_path is within sassdir, and cuts off the length of + # sassdir from the input file. + sassdir = path.normpath(path.dirname(source_path)) + source_path = path.normpath(source_path) + + # Compass offers some helpers like image-url(), which need + # information about the urls under which media files will be + # available. This is hard for two reasons: First, the options in + # question aren't supported on the command line, so we need to write + # a temporary config file. Secondly, they assume defined and + # separate directories for "images", "stylesheets" etc., something + # webassets knows nothing of: we don't support the user defining + # such directories. Because we traditionally had this + # filter point all type-specific directories to the root media + # directory, we will define the paths to match this. In other + # words, in Compass, both inline-image("img/test.png) and + # image-url("img/test.png") will find the same file, and assume it + # to be {env.directory}/img/test.png. + # However, this partly negates the purpose of an utility like + # image-url() in the first place - you not having to hard code + # the location of your images. So we allow direct modification of + # the configuration file via the COMPASS_CONFIG setting (see + # tickets #36 and #125). + # + # Note that there is also the --relative-assets option, which we + # can't use because it calculates an actual relative path between + # the image and the css output file, the latter being in a + # temporary directory in our case. 
+ config = CompassConfig( + project_path=self.ctx.directory, + http_path=self.ctx.url, + http_images_dir='', + http_stylesheets_dir='', + http_fonts_dir='', + http_javascripts_dir='', + images_dir='', + output_style=':expanded', + ) + # Update with the custom config dictionary, if any. + if self.config: + config.update(self.config) + config_file = path.join(tempout, '.config.rb') + f = open(config_file, 'w') + try: + f.write(config.to_string()) + f.flush() + finally: + f.close() + + command = [self.compass or 'compass', 'compile'] + for plugin in self.plugins or []: + command.extend(('--require', plugin)) + command.extend(['--sass-dir', sassdir, + '--css-dir', tempout, + '--config', config_file, + '--quiet', + '--boring', + source_path]) + proc = subprocess.Popen(command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + # shell: necessary on windows to execute + # ruby files, but doesn't work on linux. + shell=(os.name == 'nt')) + stdout, stderr = proc.communicate() + + # compass seems to always write a utf8 header? to stderr, so + # make sure to not fail just because there's something there. + if proc.returncode != 0: + raise FilterError(('compass: subprocess had error: stderr=%s, '+ + 'stdout=%s, returncode=%s') % ( + stderr, stdout, proc.returncode)) + + guessed_outputfilename = path.splitext(path.basename(source_path))[0] + guessed_outputfilepath = path.join(tempout, guessed_outputfilename) + output_file = open("%s.css" % guessed_outputfilepath, encoding='utf-8') + if config.get('sourcemap'): + sourcemap_file = open("%s.css.map" % guessed_outputfilepath) + sourcemap_output_filepath = path.join( + path.dirname(kw['output_path']), + path.basename(sourcemap_file.name) + ) + if not path.exists(path.dirname(sourcemap_output_filepath)): + os.mkdir(path.dirname(sourcemap_output_filepath)) + sourcemap_output_file = open(sourcemap_output_filepath, 'w') + sourcemap_output_file.write(sourcemap_file.read()) + sourcemap_file.close() + try: + contents = output_file.read() + out.write(contents) + finally: + output_file.close() + finally: + # Restore previous working dir + os.chdir(old_wd) + # Clean up the temp dir + shutil.rmtree(tempout) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssmin.py b/pelican/plugins/webassets/vendor/webassets/filter/cssmin.py new file mode 100644 index 0000000..bf0c4a2 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssmin.py @@ -0,0 +1,26 @@ +from __future__ import absolute_import +from webassets.filter import Filter + + +__all__ = ('CSSMin',) + + +class CSSMin(Filter): + """Minifies CSS. + + Requires the ``cssmin`` package (http://github.com/zacharyvoase/cssmin), + which is a port of the YUI CSS compression algorithm. + """ + + name = 'cssmin' + + def setup(self): + try: + import cssmin + except ImportError: + raise EnvironmentError('The "cssmin" package is not installed.') + else: + self.cssmin = cssmin + + def output(self, _in, out, **kw): + out.write(self.cssmin.cssmin(_in.read())) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssprefixer.py b/pelican/plugins/webassets/vendor/webassets/filter/cssprefixer.py new file mode 100644 index 0000000..59cffbb --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssprefixer.py @@ -0,0 +1,25 @@ +from __future__ import absolute_import +from webassets.filter import Filter + + +__all__ = ('CSSPrefixer',) + + +class CSSPrefixer(Filter): + """Uses `CSSPrefixer `_ + to add vendor prefixes to CSS files. 
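+
+    A minimal usage sketch (file names are illustrative)::
+
+        Bundle('style.css', filters='cssprefixer', output='gen/style.css')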
+ """ + + name = 'cssprefixer' + max_debug_level = 'merge' + + def setup(self): + import cssprefixer + self.cssprefixer = cssprefixer + + def output(self, _in, out, **kw): + output = self.cssprefixer.process(_in.read(), False, False) + if isinstance(output, unicode): + # cssprefixer likes to return unicode strings + output = output.encode('utf8') + out.write(output) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/__init__.py b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/__init__.py new file mode 100644 index 0000000..a3985e0 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/__init__.py @@ -0,0 +1,110 @@ +import os +from os.path import join +from webassets.utils import common_path_prefix +from webassets.utils import urlparse +from . import urlpath +try: + from collections import OrderedDict +except ImportError: + # Use an ordered dict when available, otherwise we simply don't + # support ordering - it's just a nice bonus. + OrderedDict = dict + +from .base import CSSUrlRewriter, addsep, path2url + + +__all__ = ('CSSRewrite',) + + +class CSSRewrite(CSSUrlRewriter): + """Source filter that rewrites relative urls in CSS files. + + CSS allows you to specify urls relative to the location of the CSS file. + However, you may want to store your compressed assets in a different place + than source files, or merge source files from different locations. This + would then break these relative CSS references, since the base URL changed. + + This filter transparently rewrites CSS ``url()`` instructions in the source + files to make them relative to the location of the output path. It works as + a *source filter*, i.e. it is applied individually to each source file + before they are merged. + + No configuration is necessary. + + The filter also supports a manual mode:: + + get_filter('cssrewrite', replace={'old_directory':'/custom/path/'}) + + This will rewrite all urls that point to files within ``old_directory`` to + use ``/custom/path`` as a prefix instead. + + You may plug in your own replace function:: + + get_filter('cssrewrite', replace=lambda url: re.sub(r'^/?images/', '/images/', url)) + get_filter('cssrewrite', replace=lambda url: '/images/'+url[7:] if url.startswith('images/') else url) + """ + + # TODO: If we want to support inline assets, this needs to be + # updated to optionally convert URLs to absolute ones based on + # MEDIA_URL. + + name = 'cssrewrite' + max_debug_level = 'merge' + + def __init__(self, replace=False): + super(CSSRewrite, self).__init__() + self.replace = replace + + def unique(self): + # Allow mixing the standard version of this filter, and replace mode. + return self.replace + + def input(self, _in, out, **kw): + if self.replace not in (False, None) and not callable(self.replace): + # For replace mode, make sure we have all the directories to be + # rewritten in form of a url, so we can later easily match it + # against the urls encountered in the CSS. 
+ replace_dict = False + root = addsep(self.ctx.directory) + replace_dict = OrderedDict() + for repldir, sub in self.replace.items(): + repldir = addsep(os.path.normpath(join(root, repldir))) + replurl = path2url(repldir[len(common_path_prefix([root, repldir])):]) + replace_dict[replurl] = sub + self.replace_dict = replace_dict + + return super(CSSRewrite, self).input(_in, out, **kw) + + def replace_url(self, url): + # Replace mode: manually adjust the location of files + if callable(self.replace): + return self.replace(url) + elif self.replace is not False: + for to_replace, sub in self.replace_dict.items(): + targeturl = urlparse.urljoin(self.source_url, url) + if targeturl.startswith(to_replace): + url = "%s%s" % (sub, targeturl[len(to_replace):]) + # Only apply the first match + break + + # Default mode: auto correct relative urls + else: + # If path is an absolute one, keep it + parsed = urlparse.urlparse(url) + if not parsed.scheme and not parsed.path.startswith('/'): + abs_source_url = urlparse.urljoin(self.source_url, url) + + # relpath() will not detect this case + if urlparse.urlparse(abs_source_url).scheme: + return abs_source_url + + # rewritten url: relative path from new location (output) + # to location of referenced file (source + current url) + url = urlpath.relpath(self.output_url, abs_source_url) + + return url + + def get_additional_cache_keys(self, **kw): + if 'output_path' in kw: + return [os.path.dirname(kw['output_path'])] + return [] diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/base.py b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/base.py new file mode 100644 index 0000000..db358a7 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/base.py @@ -0,0 +1,118 @@ +import os +import re +from os.path import join, normpath +from webassets.filter import Filter +from webassets.utils import common_path_prefix + + +__all__ = () + + +def addsep(path): + """Add a trailing path separator.""" + if path and path[-1] != os.path.sep: + return path + os.path.sep + return path + + +def path2url(path): + """Simple helper for NT systems to replace slash syntax.""" + if os.name == 'nt': + return path.replace('\\', '/') + return path + + +class PatternRewriter(Filter): + """Base class for input filters which want to replace certain patterns. + """ + + # Define the patterns in the form of: + # method to call -> pattern to call it for (as a compiled regex) + patterns = {} + + def input(self, _in, out, **kw): + content = _in.read() + for func, pattern in self.patterns.items(): + if not callable(func): + func = getattr(self, func) + # Should this pass along **kw? How many subclasses would need it? + # As is, subclasses needing access need to overwrite input() and + # set class attributes. + content = pattern.sub(func, content) + out.write(content) + + +urltag_re = re.compile(r""" +url\( + (\s*) # allow whitespace wrapping (and capture) + ( # capture actual url + [^\)\\\r\n]*? # don't allow newlines, closing paran, escape chars (1) + (?:\\. # process all escapes here instead + [^\)\\\r\n]*? # proceed, with previous restrictions (1) + )* # repeat until end + ) + (\s*) # whitespace again (and capture) +\) + +# (1) non-greedy to let the last whitespace group capture something +# TODO: would it be faster to handle whitespace within _rewrite()? +""", re.VERBOSE) + + +class CSSUrlRewriter(PatternRewriter): + """Base class for input filters which need to replace url() statements + in CSS stylesheets. 
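+
+    Subclasses provide ``replace_url()``. A minimal sketch (the class and
+    filter name below are made up for illustration)::
+
+        class NoopRewrite(CSSUrlRewriter):
+            name = 'noop_rewrite'
+
+            def replace_url(self, url):
+                # Return the url unchanged.
+                return url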
+ """ + + patterns = { + 'rewrite_url': urltag_re + } + + def input(self, _in, out, **kw): + source, source_path, output, output_path = \ + kw['source'], kw['source_path'], kw['output'], kw['output_path'] + + self.source_path = source_path + self.output_path = output_path + self.source_url = self.ctx.resolver.resolve_source_to_url( + self.ctx, source_path, source) + self.output_url = self.ctx.resolver.resolve_output_to_url( + self.ctx, output) + + return super(CSSUrlRewriter, self).input(_in, out, **kw) + + def rewrite_url(self, m): + # Get the regex matches; note how we maintain the exact + # whitespace around the actual url; we'll indeed only + # replace the url itself. + text_before = m.groups()[0] + url = m.groups()[1] + text_after = m.groups()[2] + + # Normalize the url: remove quotes + quotes_used = '' + if url[:1] in '"\'': + quotes_used = url[:1] + url = url[1:] + if url[-1:] in '"\'': + url = url[:-1] + + url = self.replace_url(url) or url + + result = 'url(%s%s%s%s%s)' % ( + text_before, quotes_used, url, quotes_used, text_after) + return result + + def replace_url(self, url): + """Implement this to return a replacement for each URL found.""" + raise NotImplementedError() + + +if __name__ == '__main__': + for text, expect in [ + (r' url(icon\)xyz) ', r'url(icon\)xyz)'), + (r' url(icon\\)xyz) ', r'url(icon\\)'), + (r' url(icon\\\)xyz) ', r'url(icon\\\)xyz)'), + ]: + m = urltag_re.search(text) + assert m.group() == expect diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/urlpath.py b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/urlpath.py new file mode 100644 index 0000000..f768da5 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/urlpath.py @@ -0,0 +1,269 @@ +# urlpath.py + +# 0.1.0 +# 2005/08/20 + +# Functions that handle url paths. +# Part of Pythonutils +# http://www.voidspace.org.uk/python/pythonutils.html + +# Copyright Michael Foord, 2004 & 2005. +# Released subject to the BSD License +# Please see http://www.voidspace.org.uk/python/license.shtml + +# For information about bugfixes, updates and support, please join the +# Pythonutils mailing list. +# http://groups.google.com/group/pythonutils/ +# Comments, suggestions and bug reports welcome. +# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml +# E-mail fuzzyman@voidspace.org.uk + +from __future__ import print_function +import posixpath +import os +try: + from urllib.request import url2pathname, pathname2url +except ImportError: + from urllib import url2pathname, pathname2url + +__all__ = [ + 'nativejoin', + 'pathjoin', + 'relpathto', + 'tslash', + 'relpath' + ] + +def pathjoin(base, *paths): + """ + Join paths to a base, observing pardir. + + If base doesn't *end* with '/' we assume it's a file rather than a directory. + (so we get rid of it) + """ + # XXXX will posixpath.join do all this anyway? + if base and not base.endswith('/'): + # get rid of the filename + base = '/'.join(base.split('/')[:-1]) + base = tslash(base) + path = (base,) + paths + return posixpath.normpath(posixpath.join(*path)) + +def nativejoin(base, path): + """ + Joins two paths - returning a native file path. + + Given a base path and a relative location, (in posix format) + return a file path in a (relatively) OS native way. + """ + return url2pathname(pathjoin(base, path)) + +def relpathto(thisdir, origin, dest): + """ + Given two paths relative to a directory, work out a path from origin + to destination. + + Assumes UNIX/URL type relative paths. 
+ If origin doesn't *end* with '/' we assume it's a file rather than a + directory. + + If the same paths are passed in : + if the path ends with ('/') then we return '' + else we return the last part of the path (presumably a filename) + + If thisdir doesn't start with '/' then we add one + (this makes the top level of thisdir our root directory) + """ + orig_thisdir = thisdir + if not thisdir.startswith('/'): + thisdir = '/' + thisdir + orig_abs = posixpath.normpath(posixpath.join(thisdir, origin)) + dest_abs = posixpath.normpath(posixpath.join(thisdir, dest)) + if origin.endswith('/') and not orig_abs.endswith('/'): + orig_abs = orig_abs + '/' + if dest.endswith('/') and not dest_abs.endswith('/'): + dest_abs = dest_abs + '/' +# print orig_abs, dest_abs + # + # if the first item is a filename, we want to get rid of it + orig_list = orig_abs.split('/')[:-1] + dest_list = dest_abs.split('/') +# print orig_list, dest_list + + if orig_list[0] != dest_list[0]: + # can't get here from there + # XXXX raise exception? + return dest + # + # find the location where the two paths start to differ. + i = 0 + for start_seg, dest_seg in zip(orig_list, dest_list): + if start_seg != dest_seg: + break + i += 1 + # + # now i is the point where the two paths diverge; + # need a certain number of "os.pardir"s to work up + # from the origin to the point of divergence. + segments = ['..'] * (len(orig_list) - i) + # need to add the diverging part of dest_list. + segments += dest_list[i:] + if len(segments) == 0: + # if they happen to be identical paths + # identical directories + if dest.endswith('/'): + return '' + # just the filename - the last part of dest + return dest_list[-1] + else: + return '/'.join(segments) + +def relpath(origin, dest): + """Given two absolute paths, work out a path from origin to destination. + + Assumes UNIX/URL type relative paths. + If origin doesn't *end* with '/' we assume it's a file rather than + a directory. + + If the same paths are passed in : + if the path ends with ('/') then we return '' + else we return the last part of the path (presumably a filename) + + If origin or dest don't start with '/' then we add it. + + We are *assuming* relative paths on the same device + (i.e. same top level directory) + """ + if not origin.startswith('/'): + origin = '/' + origin + if not dest.startswith('/'): + dest = '/' + dest + # + # if the first item is a filename, we want to get rid of it + orig_list = origin.split('/')[:-1] + dest_list = dest.split('/') + # + # find the location where the two paths start to differ. + i = 0 + for start_seg, dest_seg in zip(orig_list, dest_list): + if start_seg != dest_seg: + break + i += 1 + + # now i is the point where the two paths diverge. + # need a certain number of "os.pardir"s to work up + # from the origin to the point of divergence. + segments = ['..'] * (len(orig_list) - i) + # need to add the diverging part of dest_list. + segments += dest_list[i:] + if len(segments) == 0: + # if they happen to be identical paths + # identical directories + if dest.endswith('/'): + return '' + # just the filename - the last part of dest + return dest_list[-1] + else: + return '/'.join(segments) + +def tslash(apath): + """Add a trailing slash to a path if it needs one. + + Doesn't use os.sep because you end up jiggered on windoze - when you + want separators for URLs. + """ + if (apath and + apath != '.' 
and + not apath.endswith('/') and + not apath.endswith('\\')): + return apath + '/' + else: + return apath + +############################################## + +def testJoin(): + thelist = [ + ('/', 'fish.html'), + ('/dir/dir/', '../file'), + ('dir/dir/', '../file'), + ('dir/dir/', '../../file'), + ('dir/dir/', '../../../file'), + ('/dir/dir/', '../notherdir/file'), + ('/dir/dir/', '../../notherdir/file'), + ('dir/dir/', '../../notherdir/file'), + ('dir/dir/', '../../../notherdir/file'), + ('', '../path'), + ] + for entry in thelist: + print(entry, ' :: ', pathjoin(*entry)) + print(entry, ' :: ', nativejoin(*entry)) + print('\n') + +def testRelpathto(): + thedir = '//toplevel/dirone/dirtwo/dirthree' + thelist = [ + ('file1.html', 'file2.html'), + ('file1.html', '../file2.html'), + ('../file1.html', '../file2.html'), + ('../file1.html', 'file2.html'), + ('../fish1/fish2/', '../../sub1/sub2/'), + ('../fish1/fish2/', 'sub1/sub2'), + ('../../../fish1/fish2/', 'sub1/sub2/'), + ('../../../fish1/fish2/', 'sub1/sub2/file1.html'), + ] + for orig, dest in thelist: + print('(%s, %s) : ' % (orig, dest), relpathto(thedir, orig, dest)) + +def testRelpathto2(): + thedir = 'section3/' + thelist = [ + ('../archive/strangeindex1.html', 'article2.html'), + ] + for orig, dest in thelist: + answer = relpathto(thedir, orig, dest) + print('(%s, %s) : ' % (orig, dest), answer) + +def testRelpath(): + thelist = [ + ('/hello/fish/', 'bungles'), + ] + for orig, dest in thelist: + answer = relpath(orig, dest) + print('(%s, %s) : ' % (orig, dest), answer) + + +if __name__ == '__main__': + testJoin() + testRelpathto() + testRelpath() +# testRelpathto2() + +""" +TODO +==== + +More comprehensive tests. + +CHANGELOG +2005/07/31 +Can now pass multiple args to ``pathjoin``. +Finalised as version 0.1.0 + +2005/06/18 +Changes by Nicola Larosa + Code cleanup + lines shortened + comments on line above code + empty comments in empty lines + +2005/05/28 +Added relpath to __all__ + + +TODO +Move into pythonutils +relpathto could call relpath (and so be shorter) +nativejoin could accept multiple paths +Could tslash be more elegant ? +""" diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssutils.py b/pelican/plugins/webassets/vendor/webassets/filter/cssutils.py new file mode 100644 index 0000000..2b8f4a1 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssutils.py @@ -0,0 +1,34 @@ +from __future__ import absolute_import +import logging +import logging.handlers + +from webassets.filter import Filter + + +__all__ = ('CSSUtils',) + + +class CSSUtils(Filter): + """Minifies CSS by removing whitespace, comments etc., using the Python + `cssutils `_ library. + + Note that since this works as a parser on the syntax level, so invalid + CSS input could potentially result in data loss. + """ + + name = 'cssutils' + + def setup(self): + import cssutils + self.cssutils = cssutils + + # cssutils is unaware of many new CSS3 properties, + # vendor-prefixes etc., and logs many non-fatal warnings + # about them. These diagnostic messages are rather + # useless, so disable everything that's non-fatal. 
+ cssutils.log.setLevel(logging.FATAL) + + def output(self, _in, out, **kw): + sheet = self.cssutils.parseString(_in.read()) + self.cssutils.ser.prefs.useMinified() + out.write(sheet.cssText.decode('utf-8')) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/datauri.py b/pelican/plugins/webassets/vendor/webassets/filter/datauri.py new file mode 100644 index 0000000..339fed6 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/datauri.py @@ -0,0 +1,71 @@ +from base64 import b64encode +import mimetypes +import os +from webassets.utils import urlparse + +from webassets.filter.cssrewrite.base import CSSUrlRewriter + + +__all__ = ('CSSDataUri',) + + +class CSSDataUri(CSSUrlRewriter): + """Will replace CSS url() references to external files with internal + `data: URIs `_. + + The external file is now included inside your CSS, which minimizes HTTP + requests. + + .. note:: + + Data Uris have `clear disadvantages `_, + so put some thought into if and how you would like to use them. Have + a look at some `performance measurements `_. + + The filter respects a ``DATAURI_MAX_SIZE`` option, which is the maximum + size (in bytes) of external files to include. The default limit is what + I think should be a reasonably conservative number, 2048 bytes. + """ + + name = 'datauri' + options = { + 'max_size': 'DATAURI_MAX_SIZE', + } + + def replace_url(self, url): + if url.startswith('data:'): + # Don't even both sending data: through urlparse(), + # who knows how well it'll deal with a lot of data. + return + + # Ignore any urls which are not relative + parsed = urlparse.urlparse(url) + if parsed.scheme or parsed.netloc or parsed.path.startswith('/'): + return + + # Since this runs BEFORE cssrewrite, we can thus assume that urls + # will be relative to the file location. + # + # Notes: + # - Django might need to override this filter for staticfiles if it + # it should be possible to resolve cross-references between + # different directories. + # - For Flask-Assets blueprints, the logic might need to be: + # 1) Take source_path, convert into correct url via absurl(). + # 2) Join with the URL be be replaced. + # 3) Convert url back to the filesystem path to which the url + # would map (the hard part?). + # + + filename = os.path.join(os.path.dirname(self.source_path), url) + + try: + if os.stat(filename).st_size <= (self.max_size or 2048): + with open(filename, 'rb') as f: + data = b64encode(f.read()) + return 'data:%s;base64,%s' % ( + mimetypes.guess_type(filename)[0], data.decode()) + except (OSError, IOError): + # Ignore the file not existing. + # TODO: When we have a logging system, this could produce a warning + return diff --git a/pelican/plugins/webassets/vendor/webassets/filter/dust.py b/pelican/plugins/webassets/vendor/webassets/filter/dust.py new file mode 100644 index 0000000..4623455 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/dust.py @@ -0,0 +1,57 @@ +"""Compile DustJS templates to a single JavaScript file that, when +loaded in the browser, registers automatically. + +""" + +from webassets.filter import ExternalTool + + +__all__ = ('DustJS',) + + +class DustJS(ExternalTool): + """`DustJS `_ templates compilation + filter. + + Takes a directory full ``.dust`` files and creates a single Javascript + object that registers to the ``dust`` global when loaded in the browser:: + + Bundle('js/templates/', filters='dustjs') + + Note that in the above example, a directory is given as the bundle + contents, which is unusual, but required by this filter. 
+ + This uses the ``dusty`` compiler, which is a separate project from the + DustJS implementation. To install ``dusty`` together with LinkedIn's + version of ``dustjs`` (the original does not support NodeJS > 0.4):: + + npm install dusty + rm -rf node_modules/dusty/node_modules/dust + git clone https://github.com/linkedin/dustjs node_modules/dust + + .. note:: + + To generate the DustJS client-side Javascript, you can then do:: + + cd node_modules/dust + make dust + cp dist/dist-core...js your/static/assets/path + + For compilation, set the ``DUSTY_PATH=.../node_modules/dusty/bin/dusty``. + Optionally, set ``NODE_PATH=.../node``. + """ + + name = 'dustjs' + options = {'dusty_path': 'DUSTY_PATH', + 'node_path': 'NODE_PATH'} + max_debug_level = None + + def open(self, out, source_path, **kw): + args = [] + if self.node_path: + args += [self.node_path] + args += [self.dusty_path or 'dusty'] + # no need for --single, as we output to STDOUT + args += [source_path] + + self.subprocess(args, out) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/handlebars.py b/pelican/plugins/webassets/vendor/webassets/filter/handlebars.py new file mode 100644 index 0000000..173a92f --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/handlebars.py @@ -0,0 +1,75 @@ +import subprocess +import os +from os import path + +from webassets.exceptions import FilterError +from webassets.filter.jst import JSTemplateFilter + + +__all__ = ('Handlebars',) + + +class Handlebars(JSTemplateFilter): + """Compile `Handlebars `_ templates. + + This filter assumes that the ``handlebars`` executable is in the path. + Otherwise, you may define a ``HANDLEBARS_BIN`` setting. + + .. note:: + Use this filter if you want to precompile Handlebars templates. + If compiling them in the browser is acceptable, you may use the + JST filter, which needs no external dependency. + + .. warning:: + Currently, this filter is not compatible with input filters. Any + filters that would run during the input-stage will simply be + ignored. Input filters tend to be other compiler-style filters, + so this is unlikely to be an issue. + """ + + # TODO: We should fix the warning above. Either, me make this filter + # support input-processing (we'd have to work with the hunks given to + # us, rather than the original source files), or make webassets raise + # an error if the handlebars filter is combined with an input filter. + # I'm unsure about the best API design. We could support open() + # returning ``True`` to indicate "no input filters allowed" ( + # surprisingly hard to implement) Or, use an attribute to declare + # as much. 
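+    # Illustrative usage only (paths are made up): precompile a set of
+    # templates into a single bundle, e.g.
+    #   Bundle('templates/*.handlebars', filters='handlebars',
+    #          output='gen/templates.js')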
+ + name = 'handlebars' + options = { + 'binary': 'HANDLEBARS_BIN', + 'extra_args': 'HANDLEBARS_EXTRA_ARGS', + 'root': 'HANDLEBARS_ROOT', + } + max_debug_level = None + + def process_templates(self, out, hunks, **kw): + templates = [info['source_path'] for _, info in hunks] + + if self.root is True: + root = self.get_config('directory') + elif self.root: + root = path.join(self.get_config('directory'), self.root) + else: + root = self._find_base_path(templates) + + args = [self.binary or 'handlebars'] + if root: + args.extend(['-r', root]) + if self.extra_args: + args.extend(self.extra_args) + args.extend(templates) + + proc = subprocess.Popen( + args, stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=(os.name == 'nt')) + stdout, stderr = proc.communicate() + + if proc.returncode != 0: + raise FilterError(('handlebars: subprocess had error: stderr=%s, '+ + 'stdout=%s, returncode=%s') % ( + stderr, stdout, proc.returncode)) + out.write(stdout.decode('utf-8').strip() + ';') diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jade.py b/pelican/plugins/webassets/vendor/webassets/filter/jade.py new file mode 100644 index 0000000..16bab7d --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/jade.py @@ -0,0 +1,130 @@ +# coding=utf-8 + +from __future__ import print_function +import os, subprocess +from webassets.filter import Filter, register_filter +from webassets.exceptions import FilterError + + +class Jade(Filter): + """Converts `Jade `_ templates to client-side + JavaScript functions. + + Requires the Jade executable to be available externally. You can install it + using the `Node Package Manager `_:: + + $ npm install jade + + Jade templates are compiled and stored in a window-level JavaScript object + under a key corresponding to the template file's basename. For example, + ``keyboardcat.jade`` compiles into: + + window.templates['keyboardcat'] = function() { ... }; + + Supported configuration options: + + JADE_BIN + The system path to the Jade binary. If not set assumes ``jade`` is in + the system path. + + JADE_RUNTIME + The system path to the Jade runtime, ``runtime.js`` which ships with + Jade. If you installed Jade locally it can be found in: + + node_modules/jade/runtime.js + + Globally, on Ubuntu it's typically located in: + + /usr/lib/node_modules/jade/runtime.js + + Or sometimes: + + /usr/local/lib/node_modules/jade/runtime.js + + If, for some reason you can't find your Jade runtime you can download + it from the `Jade Repository `_:: + but do take care to download the runtime version which matches the + version of your installed Jade. + + JADE_NO_DEBUG + Omits debugging information to output shorter functions. + + JADE_TEMPLATE_VAR + The window-level JavaScript object where the compiled Jade objects will + be stored. 
By default this defaults to ``templates`` as such: + + window['templates'] + """ + + name = 'jade' + max_debug_level = None + options = { + 'jade': 'JADE_BIN', + 'jade_runtime': 'JADE_RUNTIME', + 'jade_no_debug': 'JADE_NO_DEBUG', + 'js_var': 'JADE_TEMPLATE_VAR' + } + argv = [] + + + def setup(self): + """ + Check options and apply defaults if necessary + """ + super(Jade, self).setup() + + self.argv = [self.jade or 'jade'] + self.argv.append('--client') + + if self.jade_no_debug: + self.argv.append('--no-debug') + + if not self.js_var: + self.js_var = 'templates' + + + def input(self, _in, out, **kwargs): + """ + Compile individual Jade templates + """ + proc = subprocess.Popen(self.argv, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=(os.name == 'nt')) + stdout, stderr = proc.communicate(_in.read()) + + if proc.returncode != 0: + raise FilterError(('jade: subprocess returned a non-success ' + + 'result code: %s, stdout=%s, stderr=%s') + % (proc.returncode, stdout, stderr)) + elif stderr: + print('jade filter has warnings:', stderr) + + # Add a bit of JavaScript that will place our compiled Jade function + # into an object on the `window` object. Jade files are keyed by their + # basename. + key = os.path.splitext(os.path.basename(kwargs['source_path']))[0] + preamble = "window['%s']['%s'] = " % (self.js_var, key) + + out.write('%s%s' % (preamble, stdout.strip())) + + + def output(self, _in, out, **kwargs): + """ + Prepend Jade runtime and initialize template variable. + """ + if self.jade_runtime: + with open(self.jade_runtime) as file: + runtime = ''.join(file.readlines()) + else: + runtime = '' + + # JavaScript code to initialize the window-level object that will hold + # our compiled Jade templates as functions + init = "if(!window['%s']) { window['%s'] = {}; }" % (self.js_var, self.js_var) + + out.write('%s\n%s\n%s' % (runtime, init, _in.read())) + + +register_filter(Jade) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jinja2.py b/pelican/plugins/webassets/vendor/webassets/filter/jinja2.py new file mode 100644 index 0000000..50a295e --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/jinja2.py @@ -0,0 +1,42 @@ +from __future__ import absolute_import +from webassets.filter import Filter + + +__all__ = ('Jinja2',) + + +class Jinja2(Filter): + """Process a file through the Jinja2 templating engine. + + Requires the ``jinja2`` package (https://github.com/mitsuhiko/jinja2). + + The Jinja2 context can be specified with the `JINJA2_CONTEXT` configuration + option or directly with `context={...}`. Example: + + .. code-block:: python + + Bundle('input.css', filters=Jinja2(context={'foo': 'bar'})) + + Additionally to enable template loading mechanics from your project you can provide + `JINJA2_ENV` or `jinja2_env` arg to make use of already created environment. 
+ """ + + name = 'jinja2' + max_debug_level = None + options = { + 'context': 'JINJA2_CONTEXT', + 'jinja2_env': 'JINJA2_ENV' + } + + def setup(self): + try: + import jinja2 + except ImportError: + raise EnvironmentError('The "jinja2" package is not installed.') + else: + self.jinja2 = jinja2 + super(Jinja2, self).setup() + + def input(self, _in, out, **kw): + tpl_factory = self.jinja2_env.from_string if self.jinja2_env else self.jinja2.Template + out.write(tpl_factory(_in.read()).render(self.context or {})) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jsmin.py b/pelican/plugins/webassets/vendor/webassets/filter/jsmin.py new file mode 100644 index 0000000..ff21d7f --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/jsmin.py @@ -0,0 +1,44 @@ +from __future__ import absolute_import +import warnings + +from webassets.filter import Filter + + +__all__ = ('JSMin',) + + +class JSMin(Filter): + """Minifies Javascript by removing whitespace, comments, etc. + + This filter uses a Python port of Douglas Crockford's `JSMin + `_, which needs + to be installed separately. + + There are actually multiple implementations available, for + example one by Baruch Even. Easiest to install via PyPI is + the one by Dave St. Germain:: + + $ pip install jsmin + + The filter is tested with this ``jsmin`` package from PyPI, + but will work with any module that exposes a + ``JavascriptMinify`` object with a ``minify`` method. + + If you want to avoid installing another dependency, use the + :class:`webassets.filter.rjsmin.RJSMin` filter instead. + """ + + name = 'jsmin' + + def setup(self): + import jsmin + self.jsmin = jsmin + + def output(self, _in, out, **kw): + if hasattr(self.jsmin, 'JavaScriptMinifier'): + # jsmin.py from v8 + minifier = self.jsmin.JavaScriptMinifier() + minified = minifier.JSMinify(_in.read()) + out.write(minified) + else: + self.jsmin.JavascriptMinify().minify(_in, out) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jspacker/__init__.py b/pelican/plugins/webassets/vendor/webassets/filter/jspacker/__init__.py new file mode 100644 index 0000000..9fc56b8 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/jspacker/__init__.py @@ -0,0 +1,24 @@ +from .jspacker import JavaScriptPacker +from webassets.filter import Filter + + +__all__ = ('JSPacker',) + + +class JSPacker(Filter): + """Reduces the size of Javascript using an inline compression + algorithm, i.e. the script will be unpacked on the client side + by the browser. + + Based on Dean Edwards' `jspacker 2 `_, + as ported by Florian Schulze. + """ + # TODO: This could support options. 
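+    # Illustrative usage only (the output path is made up):
+    #   Bundle('js/*.js', filters='jspacker', output='gen/packed.js')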
+ + name = 'jspacker' + + def output(self, _in, out, **kw): + out.write(JavaScriptPacker().pack(_in.read(), + compaction=False, + encoding=62, + fastDecode=True)) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jspacker/jspacker.py b/pelican/plugins/webassets/vendor/webassets/filter/jspacker/jspacker.py new file mode 100644 index 0000000..e681531 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/jspacker/jspacker.py @@ -0,0 +1,577 @@ +from __future__ import print_function +## ParseMaster, version 1.0 (pre-release) (2005/05/12) x6 +## Copyright 2005, Dean Edwards +## Web: http://dean.edwards.name/ +## +## This software is licensed under the CC-GNU LGPL +## Web: http://creativecommons.org/licenses/LGPL/2.1/ +## +## Ported to Python by Florian Schulze + +import os, re +import sys +if sys.version < '3': + integer_types = (int, long,) +else: + integer_types = (int,) + +# a multi-pattern parser + +class Pattern: + def __init__(self, expression, replacement, length): + self.expression = expression + self.replacement = replacement + self.length = length + + def __str__(self): + return "(" + self.expression + ")" + +class Patterns(list): + def __str__(self): + return '|'.join([str(e) for e in self]) + +class ParseMaster: + # constants + EXPRESSION = 0 + REPLACEMENT = 1 + LENGTH = 2 + GROUPS = re.compile(r"""\(""", re.M)#g + SUB_REPLACE = re.compile(r"""\$\d""", re.M) + INDEXED = re.compile(r"""^\$\d+$""", re.M) + TRIM = re.compile(r"""(['"])\1\+(.*)\+\1\1$""", re.M) + ESCAPE = re.compile(r"""\\.""", re.M)#g + #QUOTE = re.compile(r"""'""", re.M) + DELETED = re.compile("""\x01[^\x01]*\x01""", re.M)#g + + def __init__(self): + # private + self._patterns = Patterns() # patterns stored by index + self._escaped = [] + self.ignoreCase = False + self.escapeChar = None + + def DELETE(self, match, offset): + return "\x01" + match.group(offset) + "\x01" + + def _repl(self, a, o, r, i): + while (i): + m = a.group(o+i-1) + if m is None: + s = "" + else: + s = m + r = r.replace("$" + str(i), s) + i = i - 1 + r = ParseMaster.TRIM.sub("$1", r) + return r + + # public + def add(self, expression="^$", replacement=None): + if replacement is None: + replacement = self.DELETE + # count the number of sub-expressions + # - add one because each pattern is itself a sub-expression + length = len(ParseMaster.GROUPS.findall(self._internalEscape(str(expression)))) + 1 + # does the pattern deal with sub-expressions? + if (isinstance(replacement, str) and ParseMaster.SUB_REPLACE.match(replacement)): + # a simple lookup? (e.g. "$2") + if (ParseMaster.INDEXED.match(replacement)): + # store the index (used for fast retrieval of matched strings) + replacement = int(replacement[1:]) - 1 + else: # a complicated lookup (e.g. 
"Hello $2 $1") + # build a function to do the lookup + i = length + r = replacement + replacement = lambda a,o: self._repl(a,o,r,i) + # pass the modified arguments + self._patterns.append(Pattern(expression, replacement, length)) + + # execute the global replacement + def execute(self, string): + if self.ignoreCase: + r = re.compile(str(self._patterns), re.I | re.M) + else: + r = re.compile(str(self._patterns), re.M) + string = self._escape(string, self.escapeChar) + string = r.sub(self._replacement, string) + string = self._unescape(string, self.escapeChar) + string = ParseMaster.DELETED.sub("", string) + return string + + # clear the patterns collections so that this object may be re-used + def reset(self): + self._patterns = Patterns() + + # this is the global replace function (it's quite complicated) + def _replacement(self, match): + i = 1 + # loop through the patterns + for pattern in self._patterns: + if match.group(i) is not None: + replacement = pattern.replacement + if callable(replacement): + return replacement(match, i) + elif isinstance(replacement, integer_types): + return match.group(replacement+i) + else: + return replacement + else: + i = i+pattern.length + + # encode escaped characters + def _escape(self, string, escapeChar=None): + def repl(match): + char = match.group(1) + self._escaped.append(char) + return escapeChar + if escapeChar is None: + return string + r = re.compile("\\"+escapeChar+"(.)", re.M) + result = r.sub(repl, string) + return result + + # decode escaped characters + def _unescape(self, string, escapeChar=None): + def repl(match): + try: + #result = eval("'"+escapeChar + self._escaped.pop(0)+"'") + result = escapeChar + self._escaped.pop(0) + return result + except IndexError: + return escapeChar + if escapeChar is None: + return string + r = re.compile("\\"+escapeChar, re.M) + result = r.sub(repl, string) + return result + + def _internalEscape(self, string): + return ParseMaster.ESCAPE.sub("", string) + + +## packer, version 2.0 (2005/04/20) +## Copyright 2004-2005, Dean Edwards +## License: http://creativecommons.org/licenses/LGPL/2.1/ + +## Ported to Python by Florian Schulze + +## http://dean.edwards.name/packer/ + +class JavaScriptPacker: + def __init__(self): + self._basicCompressionParseMaster = self.getCompressionParseMaster(False) + self._specialCompressionParseMaster = self.getCompressionParseMaster(True) + + def basicCompression(self, script): + return self._basicCompressionParseMaster.execute(script) + + def specialCompression(self, script): + return self._specialCompressionParseMaster.execute(script) + + def getCompressionParseMaster(self, specialChars): + IGNORE = "$1" + parser = ParseMaster() + parser.escapeChar = '\\' + # protect strings + parser.add(r"""'[^']*?'""", IGNORE) + parser.add(r'"[^"]*?"', IGNORE) + # remove comments + parser.add(r"""//[^\n\r]*?[\n\r]""") + parser.add(r"""/\*[^*]*?\*+([^/][^*]*?\*+)*?/""") + # protect regular expressions + parser.add(r"""\s+(\/[^\/\n\r\*][^\/\n\r]*\/g?i?)""", "$2") + parser.add(r"""[^\w\$\/'"*)\?:]\/[^\/\n\r\*][^\/\n\r]*\/g?i?""", IGNORE) + # remove: ;;; doSomething(); + if specialChars: + parser.add(""";;;[^\n\r]+[\n\r]""") + # remove redundant semi-colons + parser.add(r""";+\s*([};])""", "$2") + # remove white-space + parser.add(r"""(\b|\$)\s+(\b|\$)""", "$2 $3") + parser.add(r"""([+\-])\s+([+\-])""", "$2 $3") + parser.add(r"""\s+""", "") + return parser + + def getEncoder(self, ascii): + mapping = {} + base = ord('0') + mapping.update(dict([(i, chr(i+base)) for i in range(10)])) + base = 
ord('a') + mapping.update(dict([(i+10, chr(i+base)) for i in range(26)])) + base = ord('A') + mapping.update(dict([(i+36, chr(i+base)) for i in range(26)])) + base = 161 + mapping.update(dict([(i+62, chr(i+base)) for i in range(95)])) + + # zero encoding + # characters: 0123456789 + def encode10(charCode): + return str(charCode) + + # inherent base36 support + # characters: 0123456789abcdefghijklmnopqrstuvwxyz + def encode36(charCode): + l = [] + remainder = charCode + while 1: + result, remainder = divmod(remainder, 36) + l.append(mapping[remainder]) + if not result: + break + remainder = result + l.reverse() + return "".join(l) + + # hitch a ride on base36 and add the upper case alpha characters + # characters: 0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + def encode62(charCode): + l = [] + remainder = charCode + while 1: + result, remainder = divmod(remainder, 62) + l.append(mapping[remainder]) + if not result: + break + remainder = result + l.reverse() + return "".join(l) + + # use high-ascii values + def encode95(charCode): + l = [] + remainder = charCode + while 1: + result, remainder = divmod(remainder, 95) + l.append(mapping[remainder+62]) + if not result: + break + remainder = result + l.reverse() + return "".join(l) + + if ascii <= 10: + return encode10 + elif ascii <= 36: + return encode36 + elif ascii <= 62: + return encode62 + return encode95 + + def escape(self, script): + script = script.replace("\\","\\\\") + script = script.replace("'","\\'") + script = script.replace('\n','\\n') + #return re.sub(r"""([\\'](?!\n))""", "\\$1", script) + return script + + def escape95(self, script): + result = [] + for x in script: + if x>'\xa1': + x = "\\x%0x" % ord(x) + result.append(x) + return "".join(result) + + def encodeKeywords(self, script, encoding, fastDecode): + # escape high-ascii values already in the script (i.e. in strings) + if (encoding > 62): + script = self.escape95(script) + # create the parser + parser = ParseMaster() + encode = self.getEncoder(encoding) + # for high-ascii, don't encode single character low-ascii + if encoding > 62: + regexp = r"""\w\w+""" + else: + regexp = r"""\w+""" + # build the word list + keywords = self.analyze(script, regexp, encode) + encoded = keywords['encoded'] + # encode + def repl(match, offset): + return encoded.get(match.group(offset), "") + parser.add(regexp, repl) + # if encoded, wrap the script in a decoding function + script = parser.execute(script) + script = self.bootStrap(script, keywords, encoding, fastDecode) + return script + + def analyze(self, script, regexp, encode): + # analyse + # retrieve all words in the script + regexp = re.compile(regexp, re.M) + all = regexp.findall(script) + sorted = [] # list of words sorted by frequency + encoded = {} # dictionary of word->encoding + protected = {} # instances of "protected" words + if all: + unsorted = [] + _protected = {} + values = {} + count = {} + all.reverse() + for word in all: + word = "$"+word + if word not in count: + count[word] = 0 + j = len(unsorted) + unsorted.append(word) + # make a dictionary of all of the protected words in this script + # these are words that might be mistaken for encoding + values[j] = encode(j) + _protected["$"+values[j]] = j + count[word] = count[word] + 1 + # prepare to sort the word list, first we must protect + # words that are also used as codes. we assign them a code + # equivalent to the word itself. + # e.g. 
if "do" falls within our encoding range + # then we store keywords["do"] = "do"; + # this avoids problems when decoding + sorted = [None] * len(unsorted) + for word in unsorted: + if word in _protected and isinstance(_protected[word], int): + sorted[_protected[word]] = word[1:] + protected[_protected[word]] = True + count[word] = 0 + unsorted.sort(key=lambda a: count[a]) + j = 0 + for i in range(len(sorted)): + if sorted[i] is None: + sorted[i] = unsorted[j][1:] + j = j + 1 + encoded[sorted[i]] = values[i] + return {'sorted': sorted, 'encoded': encoded, 'protected': protected} + + def encodePrivate(self, charCode): + return "_"+str(charCode) + + def encodeSpecialChars(self, script): + parser = ParseMaster() + # replace: $name -> n, $$name -> $$na + def repl(match, offset): + #print offset, match.groups() + length = len(match.group(offset + 2)) + start = length - max(length - len(match.group(offset + 3)), 0) + return match.group(offset + 1)[start:start+length] + match.group(offset + 4) + parser.add(r"""((\$+)([a-zA-Z\$_]+))(\d*)""", repl) + # replace: _name -> _0, double-underscore (__name) is ignored + regexp = r"""\b_[A-Za-z\d]\w*""" + # build the word list + keywords = self.analyze(script, regexp, self.encodePrivate) + # quick ref + encoded = keywords['encoded'] + def repl(match, offset): + return encoded.get(match.group(offset), "") + parser.add(regexp, repl) + return parser.execute(script) + + # build the boot function used for loading and decoding + def bootStrap(self, packed, keywords, encoding, fastDecode): + ENCODE = re.compile(r"""\$encode\(\$count\)""") + # $packed: the packed script + #packed = self.escape(packed) + #packed = [packed[x*10000:(x+1)*10000] for x in range((len(packed)/10000)+1)] + #packed = "'" + "'+\n'".join(packed) + "'\n" + packed = "'" + self.escape(packed) + "'" + + # $count: number of words contained in the script + count = len(keywords['sorted']) + + # $ascii: base for encoding + ascii = min(count, encoding) or 1 + + # $keywords: list of words contained in the script + for i in keywords['protected']: + keywords['sorted'][i] = "" + # convert from a string to an array + keywords = "'" + "|".join(keywords['sorted']) + "'.split('|')" + + encoding_functions = { + 10: """ function($charCode) { + return $charCode; + }""", + 36: """ function($charCode) { + return $charCode.toString(36); + }""", + 62: """ function($charCode) { + return ($charCode < _encoding ? "" : arguments.callee(parseInt($charCode / _encoding))) + + (($charCode = $charCode % _encoding) > 35 ? String.fromCharCode($charCode + 29) : $charCode.toString(36)); + }""", + 95: """ function($charCode) { + return ($charCode < _encoding ? "" : arguments.callee($charCode / _encoding)) + + String.fromCharCode($charCode % _encoding + 161); + }""" + } + + # $encode: encoding function (used for decoding the script) + encode = encoding_functions[encoding] + encode = encode.replace('_encoding',"$ascii") + encode = encode.replace('arguments.callee', "$encode") + if ascii > 10: + inline = "$count.toString($ascii)" + else: + inline = "$count" + # $decode: code snippet to speed up decoding + if fastDecode: + # create the decoder + decode = r"""// does the browser support String.replace where the + // replacement value is a function? 
+ if (!''.replace(/^/, String)) { + // decode all the values we need + while ($count--) $decode[$encode($count)] = $keywords[$count] || $encode($count); + // global replacement function + $keywords = [function($encoded){return $decode[$encoded]}]; + // generic match + $encode = function(){return'\\w+'}; + // reset the loop counter - we are now doing a global replace + $count = 1; + }""" + if encoding > 62: + decode = decode.replace('\\\\w', "[\\xa1-\\xff]") + else: + # perform the encoding inline for lower ascii values + if ascii < 36: + decode = ENCODE.sub(inline, decode) + # special case: when $count==0 there ar no keywords. i want to keep + # the basic shape of the unpacking function so i'll frig the code... + if not count: + raise NotImplemented + #) $decode = $decode.replace(/(\$count)\s*=\s*1/, "$1=0"); + + + # boot function + unpack = r"""function($packed, $ascii, $count, $keywords, $encode, $decode) { + while ($count--) + if ($keywords[$count]) + $packed = $packed.replace(new RegExp("\\b" + $encode($count) + "\\b", "g"), $keywords[$count]); + return $packed; + }""" + if fastDecode: + # insert the decoder + #unpack = re.sub(r"""\{""", "{" + decode + ";", unpack) + unpack = unpack.replace('{', "{" + decode + ";", 1) + + if encoding > 62: # high-ascii + # get rid of the word-boundaries for regexp matches + unpack = re.sub(r"""'\\\\b'\s*\+|\+\s*'\\\\b'""", "", unpack) + if ascii > 36 or encoding > 62 or fastDecode: + # insert the encode function + #unpack = re.sub(r"""\{""", "{$encode=" + encode + ";", unpack) + unpack = unpack.replace('{', "{$encode=" + encode + ";", 1) + else: + # perform the encoding inline + unpack = ENCODE.sub(inline, unpack) + # pack the boot function too + unpack = self.pack(unpack, 0, False, True) + + # arguments + params = [packed, str(ascii), str(count), keywords] + if fastDecode: + # insert placeholders for the decoder + params.extend(['0', "{}"]) + + # the whole thing + return "eval(" + unpack + "(" + ",".join(params) + "))\n"; + + def pack(self, script, encoding=0, fastDecode=False, specialChars=False, compaction=True): + script = script+"\n" + self._encoding = encoding + self._fastDecode = fastDecode + if specialChars: + script = self.specialCompression(script) + script = self.encodeSpecialChars(script) + else: + if compaction: + script = self.basicCompression(script) + if encoding: + script = self.encodeKeywords(script, encoding, fastDecode) + return script + +def run(): + p = JavaScriptPacker() + script = open('test_plone.js').read() + result = p.pack(script, compaction=False, encoding=62, fastDecode=True) + open('output.js','w').write(result) + +def run1(): + + test_scripts = [] + + test_scripts.append(("""// ----------------------------------------------------------------------- +// public interface +// ----------------------------------------------------------------------- + +cssQuery.toString = function() { + return "function cssQuery() {\n [version " + version + "]\n}"; +};""", 0, False, False, """cssQuery.toString=function(){return"function cssQuery() {\n [version "+version+"]\n}"};""")) + + test_scripts.append(("""function test(_localvar) { + var $name = 'foo'; + var $$dummy = 2; + + return $name + $$dummy; +}""", 0, False, True, """function test(_0){var n='foo';var du=2;return n+du}""")) + + test_scripts.append(("""function _test($localvar) { + var $name = 1; + var _dummy = 2; + var __foo = 3; + + return $name + _dummy + $localvar + __foo; +}""", 0, False, True, """function _1(l){var n=1;var _0=2;var __foo=3;return n+_0+l+__foo}""")) + + 
test_scripts.append(("""function _test($localvar) { + var $name = 1; + var _dummy = 2; + var __foo = 3; + + return $name + _dummy + $localvar + __foo; +} + +function _bar(_ocalvar) { + var $name = 1; + var _dummy = 2; + var __foo = 3; + + return $name + _dummy + $localvar + __foo; +}""", 0, False, True, """function _3(l){var n=1;var _0=2;var __foo=3;return n+_0+l+__foo}function _2(_1){var n=1;var _0=2;var __foo=3;return n+_0+l+__foo}""")) + + test_scripts.append(("cssQuery1.js", 0, False, False, "cssQuery1-p1.js")) + test_scripts.append(("cssQuery.js", 0, False, False, "cssQuery-p1.js")) + test_scripts.append(("pack.js", 0, False, False, "pack-p1.js")) + test_scripts.append(("cssQuery.js", 0, False, True, "cssQuery-p2.js")) + # the following ones are different, because javascript might use an + # unstable sort algorithm while python uses an stable sort algorithm + test_scripts.append(("pack.js", 0, False, True, "pack-p2.js")) + test_scripts.append(("test.js", 0, False, True, """function _4(l){var n=1;var _0=2;var __foo=3;return n+_0+l+__foo}function _3(_1){var n=1;var _2=2;var __foo=3;return n+_2+l+__foo}""")) + test_scripts.append(("test.js", 10, False, False, """eval(function(p,a,c,k,e,d){while(c--){if(k[c]){p=p.replace(new RegExp("\\b"+e(c)+"\\b","g"),k[c])}}return p}('8 13($6){0 $4=1;0 7=2;0 5=3;9 $4+7+$6+5}8 11(12){0 $4=1;0 10=2;0 5=3;9 $4+10+$6+5}',10,14,'var||||name|__foo|localvar|_dummy|function|return|_2|_bar|_ocalvar|_test'.split('|'))) +""")) + test_scripts.append(("test.js", 62, False, False, """eval(function(p,a,c,k,e,d){while(c--){if(k[c]){p=p.replace(new RegExp("\\b"+e(c)+"\\b","g"),k[c])}}return p}('8 d($6){0 $4=1;0 7=2;0 5=3;9 $4+7+$6+5}8 b(c){0 $4=1;0 a=2;0 5=3;9 $4+a+$6+5}',14,14,'var||||name|__foo|localvar|_dummy|function|return|_2|_bar|_ocalvar|_test'.split('|'))) +""")) + test_scripts.append(("test.js", 95, False, False, "test-p4.js")) + test_scripts.append(("cssQuery.js", 0, False, True, "cssQuery-p3.js")) + test_scripts.append(("cssQuery.js", 62, False, True, "cssQuery-p4.js")) + + import difflib + p = JavaScriptPacker() + for script, encoding, fastDecode, specialChars, expected in test_scripts: + if os.path.exists(script): + _script = open(script).read() + else: + _script = script + if os.path.exists(expected): + _expected = open(expected).read() + else: + _expected = expected + print(script[:20], encoding, fastDecode, specialChars, expected[:20]) + print("="*40) + result = p.pack(_script, encoding, fastDecode, specialChars) + print(len(result), len(_script)) + if (result != _expected): + print("ERROR!!!!!!!!!!!!!!!!") + print(_expected) + print(result) + #print list(difflib.unified_diff(result, _expected)) + +if __name__=='__main__': + run() diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jst.py b/pelican/plugins/webassets/vendor/webassets/filter/jst.py new file mode 100644 index 0000000..101fedc --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/jst.py @@ -0,0 +1,190 @@ +import os +import re +try: + import json +except ImportError: + import simplejson as json +from webassets.filter import Filter +from webassets.utils import common_path_prefix + + +__all__ = ('JST',) + + +class JSTemplateFilter(Filter): + """Common base class for the JST and Handlebars filters, and + possibly other Javascript templating systems in the future. 
+ """ + + def concat(self, out, hunks, **kwargs): + self.process_templates(out, hunks, **kwargs) + + def process_templates(self, out, hunks, **kw): + raise NotImplementedError() + + def iter_templates_with_base(self, hunks): + """Helper that for list of ``hunks``, as given to + ``concat()``, yields 2-tuples of (name, hunk), with name + being the name of the source file relative to the common + prefix of all source files. + + In other words, each template gets the shortest possible + name to identify it. + """ + base_path = self._find_base_path( + [info['source_path'] for _, info in hunks]) + os.path.sep + for hunk, info in hunks: + name = info['source_path'] + name = name[len(base_path):] + name = os.path.splitext(name)[0] + yield name, hunk + + def _find_base_path(self, paths): + """Hmmm. There should aways be some common base path.""" + if len(paths) == 1: + return os.path.dirname(paths[0]) + return common_path_prefix(paths) + + +class JST(JSTemplateFilter): + """This filter processes generic JavaScript templates. It will generate + JavaScript code that runs all files through a template compiler, and makes + the templates available as an object. + + It was inspired by `Jammit`_. + + For example, if you have a file named ``license.jst``: + + .. code-block:: html + +
+        <div class="license">
+            <p>Name: <%= name %></p>
+            <p>Hometown: <%= birthplace %></p>
+        </div>
+ + Then, after applying this filter, you could use the template in JavaScript: + + .. code-block:: javascript + + JST.license({name : "Moe", birthplace : "Brooklyn"}); + + The name of each template is derived from the filename. If your JST files + are spread over different directories, the path up to the common prefix + will be included. For example:: + + Bundle('templates/app1/license.jst', 'templates/app2/profile.jst', + filters='jst') + + will make the templates available as ``app1/license`` and ``app2/profile``. + + .. note:: + The filter is "generic" in the sense that it does not actually compile + the templates, but wraps them in a JavaScript function call, and can + thus be used with any template language. webassets also has filters + for specific JavaScript template languages like + :class:`~.filter.dust.DustJS` or + :class:`~.filter.handlebars.Handlebars`, and those filters precompile + the templates on the server, which means a performance boost on the + client-side. + + Unless configured otherwise, the filter will use the same micro-templating + language that `Jammit`_ uses, which is turn is the same one that is + available in `underscore.js`_. The JavaScript code necessary to compile + such templates will implicitly be included in the filter output. + + *Supported configuration options:* + + JST_COMPILER (template_function) + A string that is inserted into the generated JavaScript code in place + of the function to be called that should do the compiling. Unless you + specify a custom function here, the filter will include the JavaScript + code of it's own micro-templating language, which is the one used by + `underscore.js`_ and `Jammit`_. + + If you assign a custom function, it is your responsibility to ensure + that it is available in your final JavaScript. + + If this option is set to ``False``, then the template strings will be + output directly, which is to say, ``JST.foo`` will be a string holding + the raw source of the ``foo`` template. + + JST_NAMESPACE (namespace) + How the templates should be made available in JavaScript. Defaults to + ``window.JST``, which gives you a global ``JST`` object. + + JST_BARE (bare) + Whether everything generated by this filter should be wrapped inside + an anonymous function. Default to ``False``. + + .. note:: + + If you enable this option, the namespace must be a property + of the ``window`` object, or you won't be able to access the + templates. + + JST_DIR_SEPARATOR (separator) + The separator character to use for templates within directories. + Defaults to '/' + + .. _Jammit: + .. 
_underscore.js: http://documentcloud.github.com/underscore/#template + """ + name = 'jst' + options = { + # The JavaScript compiler function to use + 'template_function': 'JST_COMPILER', + # The JavaScript namespace to put templates in + 'namespace': 'JST_NAMESPACE', + # Wrap everything in a closure + 'bare': 'JST_BARE', + # The path separator to use with templates in different directories + 'separator': 'JST_DIR_SEPARATOR' + } + max_debug_level = None + + def setup(self): + super(JST, self).setup() + self.include_jst_script = (self.template_function == 'template') \ + or self.template_function is None + + def process_templates(self, out, hunks, **kwargs): + namespace = self.namespace or 'window.JST' + + if self.bare is False: + out.write("(function(){\n") + + out.write("%s = %s || {};\n" % (namespace, namespace)) + + if self.include_jst_script: + out.write("%s\n" % _jst_script) + + for name, hunk in self.iter_templates_with_base(hunks): + # Make it a valid Javascript string. + contents = json.dumps(hunk.data()) + + out.write("%s['%s'] = " % (namespace, self._get_jst_name(name))) + if self.template_function is False: + out.write("%s;\n" % (contents)) + else: + out.write("%s(%s);\n" % ( + self.template_function or 'template', contents)) + + if self.bare is False: + out.write("})();") + + def _get_jst_name(self, name): + """Return the name for the JST with any path separators normalised""" + return _path_separator_re.sub(self.separator or "/", name) + + +_path_separator_re = re.compile(r'[/\\]+') + +_jst_script = 'var template = function(str){var fn = new Function(\'obj\', \'var \ +__p=[],print=function(){__p.push.apply(__p,arguments);};\ +with(obj||{}){__p.push(\\\'\'+str.replace(/\\\\/g, \'\\\\\\\\\')\ +.replace(/\'/g, "\\\\\'").replace(/<%=([\\s\\S]+?)%>/g,\ +function(match,code){return "\',"+code.replace(/\\\\\'/g, "\'")+",\'";})\ +.replace(/<%([\\s\\S]+?)%>/g,function(match,code){return "\');"+code\ +.replace(/\\\\\'/g, "\'").replace(/[\\r\\n\\t]/g,\' \')+"__p.push(\'";})\ +.replace(/\\r/g,\'\\\\r\').replace(/\\n/g,\'\\\\n\')\ +.replace(/\\t/g,\'\\\\t\')+"\');}return __p.join(\'\');");return fn;};' diff --git a/pelican/plugins/webassets/vendor/webassets/filter/less.py b/pelican/plugins/webassets/vendor/webassets/filter/less.py new file mode 100644 index 0000000..c228e3e --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/less.py @@ -0,0 +1,145 @@ +from __future__ import with_statement + +import os + +from webassets.filter import ExternalTool +from webassets.utils import working_directory + + +class Less(ExternalTool): + """Converts `less `_ markup to real CSS. + + This depends on the NodeJS implementation of less, installable via npm. + To use the old Ruby-based version (implemented in the 1.x Ruby gem), see + :class:`~.less_ruby.Less`. + + *Supported configuration options*: + + LESS_BIN (binary) + Path to the less executable used to compile source files. By default, + the filter will attempt to run ``lessc`` via the system path. + + LESS_LINE_NUMBERS (line_numbers) + Outputs filename and line numbers. Can be either 'comments', which + will output the debug info within comments, 'mediaquery' that will + output the information within a fake media query which is compatible + with the SASSPath to the less executable used to compile source files. + + LESS_RUN_IN_DEBUG (run_in_debug) + By default, the filter will compile in debug mode. 
Since the less + compiler is written in Javascript and capable of running in the + browser, you can set this to ``False`` to have your original less + source files served (see below). + + LESS_PATHS (paths) + Add include paths for less command line. + It should be a list of paths relatives to Environment.directory or absolute paths. + Order matters as less will pick the first file found in path order. + + LESS_AS_OUTPUT (boolean) + By default, this works as an "input filter", meaning ``less`` is + called for each source file in the bundle. This is because the + path of the source file is required so that @import directives + within the Less file can be correctly resolved. + + However, it is possible to use this filter as an "output filter", + meaning the source files will first be concatenated, and then the + Less filter is applied in one go. This can provide a speedup for + bigger projects. + + .. admonition:: Compiling less in the browser + + less is an interesting case because it is written in Javascript and + capable of running in the browser. While for performance reason you + should prebuild your stylesheets in production, while developing you + may be interested in serving the original less files to the client, + and have less compile them in the browser. + + To do so, you first need to make sure the less filter is not applied + when :attr:`Environment.debug` is ``True``. You can do so via an + option:: + + env.config['less_run_in_debug'] = False + + Second, in order for the less to identify the less source files as + needing to be compiled, they have to be referenced with a + ``rel="stylesheet/less"`` attribute. One way to do this is to use the + :attr:`Bundle.extra` dictionary, which works well with the template + tags that webassets provides for some template languages:: + + less_bundle = Bundle( + '**/*.less', + filters='less', + extra={'rel': 'stylesheet/less' if env.debug else 'stylesheet'} + ) + + Then, for example in a Jinja2 template, you would write:: + + {% assets less_bundle %} + + {% endassets %} + + With this, the ```` tag will sport the correct ``rel`` value both + in development and in production. + + Finally, you need to include the less compiler:: + + if env.debug: + js_bundle.contents += 'http://lesscss.googlecode.com/files/less-1.3.0.min.js' + """ + + name = 'less' + options = { + 'less': ('binary', 'LESS_BIN'), + 'run_in_debug': 'LESS_RUN_IN_DEBUG', + 'line_numbers': 'LESS_LINE_NUMBERS', + 'extra_args': 'LESS_EXTRA_ARGS', + 'paths': 'LESS_PATHS', + 'as_output': 'LESS_AS_OUTPUT' + } + max_debug_level = None + + def setup(self): + super(Less, self).setup() + if self.run_in_debug is False: + # Disable running in debug mode for this instance. 
+ self.max_debug_level = False + + def resolve_source(self, path): + return self.ctx.resolver.resolve_source(self.ctx, path) + + def _apply_less(self, in_, out, source_path=None, **kw): + # Set working directory to the source file so that includes are found + args = self.parse_binary(self.less or 'lessc') + if self.line_numbers: + args.append('--line-numbers=%s' % self.line_numbers) + + if self.paths: + paths = [ + path if os.path.isabs(path) else self.resolve_source(path) + for path in self.paths + ] + args.append('--include-path={0}'.format(os.pathsep.join(paths))) + + if self.extra_args: + args.extend(self.extra_args) + + args.append('-') + + if source_path: + with working_directory(filename=source_path): + self.subprocess(args, out, in_) + else: + self.subprocess(args, out, in_) + + def input(self, _in, out, source_path, output_path, **kw): + if self.as_output: + out.write(_in.read()) + else: + self._apply_less(_in, out, source_path) + + def output(self, _in, out, **kwargs): + if not self.as_output: + out.write(_in.read()) + else: + self._apply_less(_in, out) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/less_ruby.py b/pelican/plugins/webassets/vendor/webassets/filter/less_ruby.py new file mode 100644 index 0000000..b85e8cc --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/less_ruby.py @@ -0,0 +1,84 @@ +import time +import os, subprocess +import tempfile + +from webassets.filter import Filter +from webassets.exceptions import FilterError + + +__all__ = ('Less',) + + +class Less(Filter): + """Converts `Less `_ markup to real CSS. + + This uses the old Ruby implementation available in the 1.x versions of the + less gem. All 2.x versions of the gem are wrappers around the newer + NodeJS/Javascript implementation, which you are generally encouraged to + use, and which is available in webassets via the :class:`~.filter.less.Less` + filter. + + This filter for the Ruby version is being kept around for + backwards-compatibility. + + *Supported configuration options*: + + LESS_RUBY_PATH (binary) + Path to the less executable used to compile source files. By default, + the filter will attempt to run ``lessc`` via the system path. + """ + + # XXX Deprecate this one. + """ + XXX: Depending on how less is actually used in practice, it might actually + be a valid use case to NOT have this be a source filter, so that one can + split the css files into various less files, referencing variables in other + files' - without using @include, instead having them merged together by + django-assets. This will currently not work because we compile each + file separately, and the compiler would fail at undefined variables. + """ + + name = 'less_ruby' + options = { + 'less': ('binary', 'LESS_RUBY_PATH') + } + max_debug_level = None + + def open(self, out, sourcePath, **kw): + """Less currently doesn't take data from stdin, and doesn't allow + us from stdout either. Neither does it return a proper non-0 error + code when an error occurs, or even write to stderr (stdout instead)! + + Hopefully this will improve in the future: + + http://groups.google.com/group/lesscss/browse_thread/thread/3aed033a44c51b4c/b713148afde87e81 + """ + # TODO: Use NamedTemporaryFile. + outtemp_name = os.path.join(tempfile.gettempdir(), + 'assets_temp_%d.css' % int(time.time())) + + proc = subprocess.Popen( + [self.less or 'lessc', sourcePath, outtemp_name], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + # shell: necessary on windows to execute + # ruby files, but doesn't work on linux. 
+ shell=(os.name == 'nt')) + stdout, stderr = proc.communicate() + + # less only writes to stdout, as noted in the method doc, but + # check everything anyway. + if stdout or stderr or proc.returncode != 0: + if os.path.exists(outtemp_name): + os.unlink(outtemp_name) + raise FilterError(('less: subprocess had error: stderr=%s, '+ + 'stdout=%s, returncode=%s') % ( + stderr, stdout, proc.returncode)) + + outtemp = open(outtemp_name) + try: + out.write(outtemp.read()) + finally: + outtemp.close() + + os.unlink(outtemp_name) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/libsass.py b/pelican/plugins/webassets/vendor/webassets/filter/libsass.py new file mode 100644 index 0000000..6867be3 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/libsass.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" +This filter based on Jesús Jerez code [1]. + +[1] https://bitbucket.org/jhuss/webassets-libsass +""" + +from __future__ import print_function +from __future__ import absolute_import + +from webassets.filter import Filter + + +__all__ = ('LibSass',) + + +class LibSass(Filter): + """Converts `Sass `_ markup to real CSS. + + Requires the ``libsass`` package (https://pypi.python.org/pypi/libsass):: + + pip install libsass + + `libsass `_ is binding to C/C++ + implementation of a Sass compiler `Libsass + `_ + + *Configuration options:* + + LIBSASS_STYLE (style) + an optional coding style of the compiled result. choose one of: + `nested` (default), `expanded`, `compact`, `compressed` + + LIBSASS_INCLUDES (includes) + an optional list of paths to find @imported SASS/CSS source files + + LIBSASS_AS_OUTPUT + use this filter as an "output filter", meaning the source files + will first be concatenated, and then the Sass filter is applied. + + See libsass documentation for full documentation about these configuration + options: + + http://hongminhee.org/libsass-python/sass.html#sass.compile + + *Example:* + + Define a bundle for ``style.scss`` that contains ``@imports`` to files in + subfolders: + + .. 
code-block:: python + + Bundle('style.scss', filters='libsass', output='style.css', depends='**/*.scss') + + """ + name = 'libsass' + options = { + 'style': 'LIBSASS_STYLE', + 'includes': 'LIBSASS_INCLUDES', + 'as_output': 'LIBSASS_AS_OUTPUT', + } + max_debug_level = None + + def _apply_sass(self, _in, out): + args = dict( + output_style=self.style, + include_paths=(self.includes if self.includes else []) + ) + + if self.as_output: + args['string'] = _in.read() + else: + args['filename'] = _in + + out.write( + # http://hongminhee.org/libsass-python/sass.html#sass.compile + self.sass.compile(**args) + ) + + def setup(self): + super(LibSass, self).setup() + + try: + import sass + except ImportError: + raise EnvironmentError('The "libsass" package is not installed.') + else: + self.sass = sass + + if not self.style: + self.style = 'nested' + + def input(self, _in, out, source_path, **kwargs): + if self.as_output: + out.write(_in.read()) + else: + self._apply_sass(source_path, out) + + def output(self, _in, out, **kwargs): + if not self.as_output: + out.write(_in.read()) + else: + self._apply_sass(_in, out) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/node_sass.py b/pelican/plugins/webassets/vendor/webassets/filter/node_sass.py new file mode 100644 index 0000000..cbb38a0 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/node_sass.py @@ -0,0 +1,105 @@ +import os +import subprocess + +from webassets.exceptions import FilterError + +from .sass import Sass + + +__all__ = ('NodeSass', ) + + +class NodeSass(Sass): + """Converts `Scss `_ markup to real CSS. + + This uses node-sass which is a wrapper around libsass. + + This is an alternative to using the ``sass`` or ``scss`` filters, + which are based on the original, external tools. + + *Supported configuration options:* + + NODE_SASS_DEBUG_INFO (debug_info) + Include debug information in the output + + If unset, the default value will depend on your + :attr:`Environment.debug` setting. + + NODE_SASS_LOAD_PATHS (load_paths) + Additional load paths that node-sass should use. + + NODE_SASS_STYLE (style) + The style of the output CSS. Can be one of ``nested`` (default), + ``compact``, ``compressed``, or ``expanded``. + + NODE_SASS_CLI_ARGS (cli_args) + Additional cli arguments + """ + + name = 'node-sass' + options = { + 'binary': 'NODE_SASS_BIN', + 'debug_info': 'NODE_SASS_DEBUG_INFO', + 'use_scss': ('scss', 'NODE_SASS_USE_SCSS'), + 'as_output': 'NODE_SASS_AS_OUTPUT', + 'load_paths': 'NODE_SASS_LOAD_PATHS', + 'style': 'NODE_SASS_STYLE', + 'cli_args': 'NODE_SASS_CLI_ARGS', + } + max_debug_level = None + + def _apply_sass(self, _in, out, cd=None): + # Switch to source file directory if asked, so that this directory + # is by default on the load path. We could pass it via --include-paths, but then + # files in the (undefined) wd could shadow the correct files. + old_dir = os.getcwd() + if cd: + os.chdir(cd) + + try: + args = [self.binary or 'node-sass', + '--output-style', self.style or 'expanded'] + + if not self.use_scss: + args.append("--indented-syntax") + + if (self.ctx.environment.debug if self.debug_info is None else self.debug_info): + args.append('--debug-info') + for path in self.load_paths or []: + args.extend(['--include-path', path]) + + if (self.cli_args): + args.extend(self.cli_args) + + proc = subprocess.Popen(args, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + # shell: necessary on windows to execute + # ruby files, but doesn't work on linux. 
+ shell=(os.name == 'nt')) + stdout, stderr = proc.communicate(_in.read().encode('utf-8')) + + if proc.returncode != 0: + raise FilterError(('sass: subprocess had error: stderr=%s, '+ + 'stdout=%s, returncode=%s') % ( + stderr, stdout, proc.returncode)) + elif stderr: + print("node-sass filter has warnings:", stderr) + + out.write(stdout.decode('utf-8')) + finally: + if cd: + os.chdir(old_dir) + + +class NodeSCSS(NodeSass): + """Version of the ``node-sass`` filter that uses the SCSS syntax. + """ + + name = 'node-scss' + + def __init__(self, *a, **kw): + assert not 'scss' in kw + kw['scss'] = True + super(NodeSCSS, self).__init__(*a, **kw) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/postcss.py b/pelican/plugins/webassets/vendor/webassets/filter/postcss.py new file mode 100644 index 0000000..479402b --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/postcss.py @@ -0,0 +1,58 @@ +from __future__ import with_statement + +from webassets.filter import ExternalTool +from webassets.utils import working_directory + + +class PostCSS(ExternalTool): + """Processes CSS code using `PostCSS `_. + + Requires the ``postcss`` executable to be available externally. + To install it, you might be able to do:: + + $ npm install --global postcss + + You should also install the plugins you want to use:: + + $ npm install --global postcss-cssnext + + You can configure postcss in ``postcss.config.js``: + + .. code-block:: javascript + + module.exports = { + plugins: [ + require('postcss-cssnext')({ + // optional configuration for cssnext + }) + ], + }; + + *Supported configuration options*: + + POSTCSS_BIN + Path to the postcss executable used to compile source files. By + default, the filter will attempt to run ``postcss`` via the + system path. + + POSTCSS_EXTRA_ARGS + Additional command-line options to be passed to ``postcss`` using this + setting, which expects a list of strings. + + """ + name = 'postcss' + + options = { + 'binary': 'POSTCSS_BIN', + 'extra_args': 'POSTCSS_EXTRA_ARGS', + } + + max_debug_level = None + + def input(self, in_, out, source_path, **kw): + # Set working directory to the source file so that includes are found + args = [self.binary or 'postcss'] + if self.extra_args: + args.extend(self.extra_args) + with working_directory(filename=source_path): + self.subprocess(args, out, in_) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/pyscss.py b/pelican/plugins/webassets/vendor/webassets/filter/pyscss.py new file mode 100644 index 0000000..cfa6637 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/pyscss.py @@ -0,0 +1,156 @@ +import os + +from webassets.filter import Filter +from webassets.utils import working_directory + + +__all__ = ('PyScss',) + + +class PyScss(Filter): + """Converts `Scss `_ markup to real CSS. + + This uses `PyScss `_, a native + Python implementation of the Scss language. The PyScss module needs + to be installed. It's API has been changing; currently, version + 1.1.5 is known to be supported. + + This is an alternative to using the ``sass`` or ``scss`` filters, + which are based on the original, external tools. + + .. note:: + The Sass syntax is not supported by PyScss. You need to use + the ``sass`` filter based on the original Ruby implementation + instead. + + *Supported configuration options:* + + PYSCSS_DEBUG_INFO (debug_info) + Include debug information in the output for use with FireSass. + + If unset, the default value will depend on your + :attr:`Environment.debug` setting. 
+ + PYSCSS_LOAD_PATHS (load_paths) + Additional load paths that PyScss should use. + + .. warning:: + The filter currently does not automatically use + :attr:`Environment.load_path` for this. + + PYSCSS_STATIC_ROOT (static_root) + The directory PyScss should look in when searching for include + files that you have referenced. Will use + :attr:`Environment.directory` by default. + + PYSCSS_STATIC_URL (static_url) + The url PyScss should use when generating urls to files in + ``PYSCSS_STATIC_ROOT``. Will use :attr:`Environment.url` by + default. + + PYSCSS_ASSETS_ROOT (assets_root) + The directory PyScss should look in when searching for things + like images that you have referenced. Will use + ``PYSCSS_STATIC_ROOT`` by default. + + PYSCSS_ASSETS_URL (assets_url) + The url PyScss should use when generating urls to files in + ``PYSCSS_ASSETS_ROOT``. Will use ``PYSCSS_STATIC_URL`` by + default. + + PYSCSS_STYLE (style) + The style of the output CSS. Can be one of ``nested`` (default), + ``compact``, ``compressed``, or ``expanded``. + """ + + # TODO: PyScss now allows STATIC_ROOT to be a callable, though + # none of the other pertitent values are allowed to be, so this + # is probably not good enough for us. + + name = 'pyscss' + options = { + 'debug_info': 'PYSCSS_DEBUG_INFO', + 'load_paths': 'PYSCSS_LOAD_PATHS', + 'static_root': 'PYSCSS_STATIC_ROOT', + 'static_url': 'PYSCSS_STATIC_URL', + 'assets_root': 'PYSCSS_ASSETS_ROOT', + 'assets_url': 'PYSCSS_ASSETS_URL', + 'style': 'PYSCSS_STYLE', + } + max_debug_level = None + + def setup(self): + super(PyScss, self).setup() + + import scss + self.scss = scss + + if self.style: + try: + from packaging.version import Version + except ImportError: + from distutils.version import LooseVersion as Version + assert Version(scss.__version__) >= Version('1.2.0'), \ + 'PYSCSS_STYLE only supported in pyScss>=1.2.0' + + # Initialize various settings: + # Why are these module-level, not instance-level ?! + # TODO: It appears that in the current dev version, the + # settings can finally passed to a constructor. We'll need + # to support this. + + # Only the dev version appears to support a list + if self.load_paths: + scss.config.LOAD_PATHS = ','.join(self.load_paths) + + # These are needed for various helpers (working with images + # etc.). Similar to the compass filter, we require the user + # to specify such paths relative to the media directory. + try: + scss.config.STATIC_ROOT = self.static_root or self.ctx.directory + scss.config.STATIC_URL = self.static_url or self.ctx.url + except EnvironmentError: + raise EnvironmentError('Because Environment.url and/or ' + 'Environment.directory are not set, you need to ' + 'provide values for the PYSCSS_STATIC_URL and/or ' + 'PYSCSS_STATIC_ROOT settings.') + + # This directory PyScss will use when generating new files, + # like a spritemap. Maybe we should REQUIRE this to be set. + scss.config.ASSETS_ROOT = self.assets_root or scss.config.STATIC_ROOT + scss.config.ASSETS_URL = self.assets_url or scss.config.STATIC_URL + + def input(self, _in, out, **kw): + """Like the original sass filter, this also needs to work as + an input filter, so that relative @imports can be properly + resolved. + """ + + source_path = kw['source_path'] + + # Because PyScss always puts the current working dir at first + # place of the load path, this is what we need to use to make + # relative references work. 
+ with working_directory(os.path.dirname(source_path)): + + scss_opts = { + 'debug_info': ( + self.ctx.environment.debug if self.debug_info is None else self.debug_info), + } + if self.style: + scss_opts['style'] = self.style + else: + scss_opts['compress'] = False + + scss = self.scss.Scss( + scss_opts=scss_opts, + # This is rather nice. We can pass along the filename, + # but also give it already preprocessed content. + scss_files={source_path: _in.read()}) + + # Compile + # Note: This will not throw an error when certain things + # are wrong, like an include file missing. It merely outputs + # to stdout, via logging. We might have to do something about + # this, and evaluate such problems to an exception. + out.write(scss.compile()) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/rcssmin.py b/pelican/plugins/webassets/vendor/webassets/filter/rcssmin.py new file mode 100644 index 0000000..b76325c --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/rcssmin.py @@ -0,0 +1,36 @@ +from __future__ import absolute_import +from webassets.filter import Filter + + +__all__ = ('RCSSMin',) + + +class RCSSMin(Filter): + """Minifies CSS. + + Requires the ``rcssmin`` package (https://github.com/ndparker/rcssmin). + Alike 'cssmin' it is a port of the YUI CSS compression algorithm but aiming + for speed instead of maximum compression. + + Supported configuration options: + RCSSMIN_KEEP_BANG_COMMENTS (boolean) + Keep bang-comments (comments starting with an exclamation mark). + """ + + name = 'rcssmin' + options = { + 'keep_bang_comments': 'RCSSMIN_KEEP_BANG_COMMENTS', + } + + def setup(self): + super(RCSSMin, self).setup() + try: + import rcssmin + except ImportError: + raise EnvironmentError('The "rcssmin" package is not installed.') + else: + self.rcssmin = rcssmin + + def output(self, _in, out, **kw): + keep = self.keep_bang_comments or False + out.write(self.rcssmin.cssmin(_in.read(), keep_bang_comments=keep)) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/replace.py b/pelican/plugins/webassets/vendor/webassets/filter/replace.py new file mode 100644 index 0000000..829face --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/replace.py @@ -0,0 +1,52 @@ +import re +from webassets.filter import ( + Filter, + register_filter +) + + +class ReplaceFilter(Filter): + """ + A filter that allows arbitrary search/replace of strings using a source + regex and a replacement string. Unlike cssrewrite this works on strings + which are not paths and can be used as an output filter. + + Usage: + + replace_static_urls = ReplaceFilter( + pattern=r'\\s*{{\\s*STATIC_URL\\s*}}\\s*', + repl=settings.STATIC_URL, + ) + """ + + name = 'replace' + max_debug_level = None + + def __init__(self, pattern=None, repl=None, as_output=True, **kwargs): + self.pattern = pattern + self.repl = repl + self.as_output = as_output + + super(ReplaceFilter, self).__init__(**kwargs) + + def unique(self): + """ Return a hashable representation of the parameters to allow different instances of this filter. 
""" + return self.pattern, self.repl + + def _process(self, _in, out, **kwargs): + out.write(re.sub(self.pattern, self.repl, _in.read())) + + def output(self, _in, out, **kwargs): + if self.as_output: + self._process(_in, out, **kwargs) + else: + out.write(_in.read()) + + def input(self, _in, out, **kwargs): + if self.as_output: + out.write(_in.read()) + else: + self._process(_in, out, **kwargs) + + +register_filter(ReplaceFilter) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/requirejs.py b/pelican/plugins/webassets/vendor/webassets/filter/requirejs.py new file mode 100644 index 0000000..536a0e3 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/requirejs.py @@ -0,0 +1,168 @@ +from __future__ import with_statement + +import shlex +from os import path, getcwd + +from webassets.filter import ExternalTool + + +class RequireJSFilter(ExternalTool): + ''' + Optimizes AMD-style modularized JavaScript into a single asset + using `RequireJS `_. + + This depends on the NodeJS executable ``r.js``; install via npm:: + + $ npm install -g requirejs + + Details on configuring r.js can be found at + http://requirejs.org/docs/optimization.html#basics. + + *Supported configuration options*: + + executable (env: REQUIREJS_BIN) + + Path to the RequireJS executable used to compile source + files. By default, the filter will attempt to run ``r.js`` via + the system path. + + config (env: REQUIREJS_CONFIG) + + The RequireJS options file. The path is taken to be relative + to the Environment.directory (by default is /static). + + + baseUrl (env: REQUIREJS_BASEURL) + + The ``baseUrl`` parameter to r.js; this is the directory that + AMD modules will be loaded from. The path is taken relative + to the Environment.directory (by default is /static). + Typically, this is used in + conjunction with a ``baseUrl`` parameter set in the `config` + options file, where the baseUrl value in the config file is + used for client-side processing, and the value here is for + server-side processing. + + optimize (env: REQUIREJS_OPTIMIZE) + + The ``optimize`` parameter to r.js; controls whether or not + r.js minifies the output. By default, it is enabled, but can + be set to ``none`` to disable minification. The typical + scenario to disable minification is if you do some additional + processing of the JavaScript (such as removing + ``console.log()`` lines) before minification by the ``rjsmin`` + filter. + + extras (env: REQUIREJS_EXTRAS) + + Any other command-line parameters to be passed to r.js. The + string is expected to be in unix shell-style format, meaning + that quotes can be used to escape spaces, etc. + + run_in_debug (env: REQUIREJS_RUN_IN_DEBUG) + + Boolean which controls if the AMD requirejs is evaluated + client-side or server-side in debug mode. If set to a truthy + value (e.g. 'yes'), then server-side compilation is done, even + in debug mode. The default is false. + + .. admonition:: Client-side AMD evaluation + + AMD modules can be loaded client-side without any processing + done on the server-side. The advantage to this is that + debugging is easier because the browser can tell you which + source file is responsible for a particular line of code. The + disadvantage is that it means that each loaded AMD module is a + separate HTTP request. When running client-side, the client + needs access to the `config` -- for this reason, when running + in client-side mode, the webassets environment must be + adjusted to include a reference to this + configuration. 
Typically, this is done by adding something + similar to the following during webassets initialization: + + .. code-block:: python + + if env.debug and not env.config.get('requirejs_run_in_debug', True): + env['requirejs'].contents += ('requirejs-browser-config.js',) + + And the file ``requirejs-browser-config.js`` will look + something like: + + .. code-block:: js + + require.config({baseUrl: '/static/script/'}); + + Set the `run_in_debug` option to control client-side or + server-side compilation in debug. + ''' + + name = 'requirejs' + method = 'open' + options = { + 'executable' : ('executable', 'REQUIREJS_BIN'), + 'config' : ('config', 'REQUIREJS_CONFIG'), + 'baseUrl' : ('baseUrl', 'REQUIREJS_BASEURL'), + 'optimize' : ('optimize', 'REQUIREJS_OPTIMIZE'), + 'extras' : ('extras', 'REQUIREJS_EXTRAS'), + 'run_in_debug' : ('run_in_debug', 'REQUIREJS_RUN_IN_DEBUG'), + } + + max_debug_level = None + + def setup(self): + super(RequireJSFilter, self).setup() + # todo: detect if `r.js` is installed... + if not self.run_in_debug: + # Disable running in debug mode for this instance. + self.max_debug_level = False + + if self.executable: + self.argv = shlex.split(self.executable) + else: + self.argv = ['r.js'] + + if self.config: + rel_config = path.join( + path.relpath( + self.ctx.directory, + getcwd() + ), + self.config + ) + if not self.baseUrl: + self.baseUrl = path.relpath( + self.ctx.directory, + getcwd() + ) + + self.argv.extend( + filter( + None, + ['-o', + rel_config if self.config else None, + 'name={modname}', + 'out={{output}}', + 'baseUrl=' + self.baseUrl if self.baseUrl else None, + 'optimize=' + self.optimize if self.optimize else None, + ]) + ) + if self.extras: + self.argv.extend(shlex.split(self.extras)) + + def open(self, out, source_path, **kw): + if self.ctx.debug and not self.run_in_debug: + with open(source_path, 'rb') as fp: + out.write(fp.read()) + return + # extract the AMD module name + name = kw.get('source') + if not name: + base = path.abspath(self.baseUrl) + name = path.abspath(source_path) + if not name.startswith(base): + raise ValueError( + 'requested AMD script "%s" does not exist in baseUrl "%s"' + % (source_path, self.baseUrl)) + name = name[len(base) + 1:] + kw['modname'] = path.splitext(name)[0] + return super(RequireJSFilter, self).open(out, source_path, **kw) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/__init__.py b/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/__init__.py new file mode 100644 index 0000000..ff32865 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/__init__.py @@ -0,0 +1,35 @@ +from __future__ import absolute_import +try: + import rjsmin +except ImportError: + from . import rjsmin + + +from webassets.filter import Filter + + +__all__ = ('RJSMin',) + + +class RJSMin(Filter): + """Minifies Javascript by removing whitespace, comments, etc. + + Uses the `rJSmin library `_, + which is included with webassets. However, if you have the external + package installed, it will be used instead. You may want to do this + to get access to the faster C-extension. + + Supported configuration options: + + RJSMIN_KEEP_BANG_COMMENTS (boolean) + Keep bang-comments (comments starting with an exclamation mark). 
+ """ + + name = 'rjsmin' + options = { + 'keep_bang_comments': 'RJSMIN_KEEP_BANG_COMMENTS', + } + + def output(self, _in, out, **kw): + keep = self.keep_bang_comments or False + out.write(rjsmin.jsmin(_in.read(), keep_bang_comments=keep)) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/rjsmin.py b/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/rjsmin.py new file mode 100755 index 0000000..ef30102 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/rjsmin.py @@ -0,0 +1,543 @@ +#!/usr/bin/env python +# -*- coding: ascii -*- +r""" +===================== + Javascript Minifier +===================== + +rJSmin is a javascript minifier written in python. + +The minifier is based on the semantics of `jsmin.c by Douglas Crockford`_\\. + +:Copyright: + + Copyright 2011 - 2019 + Andr\xe9 Malo or his licensors, as applicable + +:License: + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +The module is a re-implementation aiming for speed, so it can be used at +runtime (rather than during a preprocessing step). Usually it produces the +same results as the original ``jsmin.c``. It differs in the following ways: + +- there is no error detection: unterminated string, regex and comment + literals are treated as regular javascript code and minified as such. +- Control characters inside string and regex literals are left untouched; they + are not converted to spaces (nor to \\n) +- Newline characters are not allowed inside string and regex literals, except + for line continuations in string literals (ECMA-5). +- "return /regex/" is recognized correctly. +- More characters are allowed before regexes. +- Line terminators after regex literals are handled more sensibly +- "+ +" and "- -" sequences are not collapsed to '++' or '--' +- Newlines before ! operators are removed more sensibly +- (Unnested) template literals are supported (ECMA-6) +- Comments starting with an exclamation mark (``!``) can be kept optionally +- rJSmin does not handle streams, but only complete strings. (However, the + module provides a "streamy" interface). + +Since most parts of the logic are handled by the regex engine it's way faster +than the original python port of ``jsmin.c`` by Baruch Even. The speed factor +varies between about 6 and 55 depending on input and python version (it gets +faster the more compressed the input already is). Compared to the +speed-refactored python port by Dave St.Germain the performance gain is less +dramatic but still between 3 and 50 (for huge inputs). See the docs/BENCHMARKS +file for details. + +rjsmin.c is a reimplementation of rjsmin.py in C and speeds it up even more. + +Supported python versions are 2.7 and 3.4+. + +.. 
_jsmin.c by Douglas Crockford: + http://www.crockford.com/javascript/jsmin.c +""" +__author__ = u"Andr\xe9 Malo" +__docformat__ = "restructuredtext en" +__license__ = "Apache License, Version 2.0" +__version__ = '1.1.0' +__all__ = ['jsmin'] + +import functools as _ft +import re as _re + + +def _make_jsmin(python_only=False): + """ + Generate JS minifier based on `jsmin.c by Douglas Crockford`_ + + .. _jsmin.c by Douglas Crockford: + http://www.crockford.com/javascript/jsmin.c + + :Parameters: + `python_only` : ``bool`` + Use only the python variant. If true, the c extension is not even + tried to be loaded. + + :Return: Minifier + :Rtype: ``callable`` + """ + # pylint: disable = unused-variable + # pylint: disable = too-many-locals + + if not python_only: + try: + import _rjsmin + except ImportError: + pass + else: + # Ensure that the C version is in sync + # https://github.com/ndparker/rjsmin/issues/11 + if getattr(_rjsmin, '__version__', None) == __version__: + return _rjsmin.jsmin + try: + xrange + except NameError: + xrange = range # pylint: disable = redefined-builtin + + space_chars = r'[\000-\011\013\014\016-\040]' + + line_comment = r'(?://[^\r\n]*)' + space_comment = r'(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)' + space_comment_nobang = r'(?:/\*(?!!)[^*]*\*+(?:[^/*][^*]*\*+)*/)' + bang_comment = r'(?:/\*![^*]*\*+(?:[^/*][^*]*\*+)*/)' + + string1 = r"(?:'[^'\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|\r)[^'\\\r\n]*)*')" + string1 = string1.replace("'", r'\047') # portability + string2 = r'(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|\r)[^"\\\r\n]*)*")' + string3 = r'(?:`[^`\\]*(?:\\(?:[^\r\n]|\r?\n|\r)[^`\\]*)*`)' + string3 = string3.replace('`', r'\140') # portability + strings = r'(?:%s|%s|%s)' % (string1, string2, string3) + + charclass = r'(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]*)*\])' + nospecial = r'[^/\\\[\r\n]' + regex = r'(?:/(?![\r\n/*])%s*(?:(?:\\[^\r\n]|%s)%s*)*/)' % ( + nospecial, charclass, nospecial + ) + space = r'(?:%s|%s)' % (space_chars, space_comment) + newline = r'(?:%s?[\r\n])' % line_comment + + def fix_charclass(result): + """ Fixup string of chars to fit into a regex char class """ + pos = result.find('-') + if pos >= 0: + result = r'%s%s-' % (result[:pos], result[pos + 1:]) + + def sequentize(string): + """ + Notate consecutive characters as sequence + + (1-4 instead of 1234) + """ + first, last, result = None, None, [] + for char in map(ord, string): + if last is None: + first = last = char + elif last + 1 == char: + last = char + else: + result.append((first, last)) + first = last = char + if last is not None: + result.append((first, last)) + return ''.join(['%s%s%s' % ( + chr(first), + last > first + 1 and '-' or '', + last != first and chr(last) or '' + ) for first, last in result]) # noqa + + return _re.sub( + r"([\000-\040'`])", # ' and ` for better portability + lambda m: '\\%03o' % ord(m.group(1)), ( + sequentize(result) + .replace('\\', '\\\\') + .replace('[', '\\[') + .replace(']', '\\]') + ) + ) + + def id_literal_(what): + """ Make id_literal like char class """ + match = _re.compile(what).match + result = ''.join([ + chr(c) for c in xrange(127) if not match(chr(c)) + ]) + return '[^%s]' % fix_charclass(result) + + def not_id_literal_(keep): + """ Make negated id_literal like char class """ + match = _re.compile(id_literal_(keep)).match + result = ''.join([ + chr(c) for c in xrange(127) if not match(chr(c)) + ]) + return r'[%s]' % fix_charclass(result) + + not_id_literal = not_id_literal_(r'[a-zA-Z0-9_$]') + preregex1 = r'[(,=:\[!&|?{};\r\n+*-]' + preregex2 = 
r'%(not_id_literal)sreturn' % locals() + + id_literal = id_literal_(r'[a-zA-Z0-9_$]') + id_literal_open = id_literal_(r'[a-zA-Z0-9_${\[(!+-]') + id_literal_close = id_literal_(r'[a-zA-Z0-9_$}\])"\047\140+-]') + post_regex_off = id_literal_(r'[^\000-\040}\])?:|,;.&=+-]') + + dull = r'[^\047"\140/\000-\040]' + + space_sub_simple = _re.compile(( + # noqa pylint: disable = bad-continuation + + r'(%(dull)s+)' # 0 + r'|(%(strings)s%(dull)s*)' # 1 + r'|(?<=%(preregex1)s)' + r'%(space)s*(?:%(newline)s%(space)s*)*' + r'(%(regex)s)' # 2 + r'(%(space)s*(?:%(newline)s%(space)s*)+' # 3 + r'(?=%(post_regex_off)s))?' + r'|(?<=%(preregex2)s)' + r'%(space)s*(?:(%(newline)s)%(space)s*)*' # 4 + r'(%(regex)s)' # 5 + r'(%(space)s*(?:%(newline)s%(space)s*)+' # 6 + r'(?=%(post_regex_off)s))?' + r'|(?<=%(id_literal_close)s)' + r'%(space)s*(?:(%(newline)s)%(space)s*)+' # 7 + r'(?=%(id_literal_open)s)' + r'|(?<=%(id_literal)s)(%(space)s)+(?=%(id_literal)s)' # 8 + r'|(?<=\+)(%(space)s)+(?=\+)' # 9 + r'|(?<=-)(%(space)s)+(?=-)' # 10 + r'|%(space)s+' + r'|(?:%(newline)s%(space)s*)+' + ) % locals()).sub + + # print(space_sub_simple.__self__.pattern) + + def space_subber_simple(match): + """ Substitution callback """ + # pylint: disable = too-many-return-statements + + groups = match.groups() + if groups[0]: + return groups[0] + elif groups[1]: + return groups[1] + elif groups[2]: + if groups[3]: + return groups[2] + '\n' + return groups[2] + elif groups[5]: + return "%s%s%s" % ( + groups[4] and '\n' or '', + groups[5], + groups[6] and '\n' or '', + ) + elif groups[7]: + return '\n' + elif groups[8] or groups[9] or groups[10]: + return ' ' + else: + return '' + + space_sub_banged = _re.compile(( + # noqa pylint: disable = bad-continuation + + r'(%(dull)s+)' # 0 + r'|(%(strings)s%(dull)s*)' # 1 + r'|(?<=%(preregex1)s)' + r'(%(space)s*(?:%(newline)s%(space)s*)*)' # 2 + r'(%(regex)s)' # 3 + r'(%(space)s*(?:%(newline)s%(space)s*)+' # 4 + r'(?=%(post_regex_off)s))?' + r'|(?<=%(preregex2)s)' + r'(%(space)s*(?:(%(newline)s)%(space)s*)*)' # 5, 6 + r'(%(regex)s)' # 7 + r'(%(space)s*(?:%(newline)s%(space)s*)+' # 8 + r'(?=%(post_regex_off)s))?' 
+ r'|(?<=%(id_literal_close)s)' + r'(%(space)s*(?:%(newline)s%(space)s*)+)' # 9 + r'(?=%(id_literal_open)s)' + r'|(?<=%(id_literal)s)(%(space)s+)(?=%(id_literal)s)' # 10 + r'|(?<=\+)(%(space)s+)(?=\+)' # 11 + r'|(?<=-)(%(space)s+)(?=-)' # 12 + r'|(%(space)s+)' # 13 + r'|((?:%(newline)s%(space)s*)+)' # 14 + ) % locals()).sub + + # print(space_sub_banged.__self__.pattern) + + keep = _re.compile(( + r'%(space_chars)s+|%(space_comment_nobang)s+|%(newline)s+' + r'|(%(bang_comment)s+)' + ) % locals()).sub + keeper = lambda m: m.groups()[0] or '' + + # print(keep.__self__.pattern) + + def space_subber_banged(match): + """ Substitution callback """ + # pylint: disable = too-many-return-statements + + groups = match.groups() + if groups[0]: + return groups[0] + elif groups[1]: + return groups[1] + elif groups[3]: + return "%s%s%s%s" % ( + keep(keeper, groups[2]), + groups[3], + keep(keeper, groups[4] or ''), + groups[4] and '\n' or '', + ) + elif groups[7]: + return "%s%s%s%s%s" % ( + keep(keeper, groups[5]), + groups[6] and '\n' or '', + groups[7], + keep(keeper, groups[8] or ''), + groups[8] and '\n' or '', + ) + elif groups[9]: + return keep(keeper, groups[9]) + '\n' + elif groups[10] or groups[11] or groups[12]: + return keep(keeper, groups[10] or groups[11] or groups[12]) or ' ' + else: + return keep(keeper, groups[13] or groups[14]) + + banged = _ft.partial(space_sub_banged, space_subber_banged) + simple = _ft.partial(space_sub_simple, space_subber_simple) + + def jsmin(script, keep_bang_comments=False): + r""" + Minify javascript based on `jsmin.c by Douglas Crockford`_\. + + Instead of parsing the stream char by char, it uses a regular + expression approach which minifies the whole script with one big + substitution regex. + + .. _jsmin.c by Douglas Crockford: + http://www.crockford.com/javascript/jsmin.c + + :Parameters: + `script` : ``str`` + Script to minify + + `keep_bang_comments` : ``bool`` + Keep comments starting with an exclamation mark? (``/*!...*/``) + + :Return: Minified script + :Rtype: ``str`` + """ + # pylint: disable = redefined-outer-name + + is_bytes, script = _as_str(script) + script = (banged if keep_bang_comments else simple)( + '\n%s\n' % script + ).strip() + if is_bytes: + return script.encode('latin-1') + return script + + return jsmin + +jsmin = _make_jsmin() + + +def _as_str(script): + """ Make sure the script is a text string """ + is_bytes = False + if str is bytes: + if not isinstance(script, basestring): # noqa pylint: disable = undefined-variable + raise TypeError("Unexpected type") + elif isinstance(script, (bytes, bytearray)): + is_bytes = True + script = script.decode('latin-1') + elif not isinstance(script, str): + raise TypeError("Unexpected type") + + return is_bytes, script + + +def jsmin_for_posers(script, keep_bang_comments=False): + r""" + Minify javascript based on `jsmin.c by Douglas Crockford`_\. + + Instead of parsing the stream char by char, it uses a regular + expression approach which minifies the whole script with one big + substitution regex. + + .. _jsmin.c by Douglas Crockford: + http://www.crockford.com/javascript/jsmin.c + + :Warning: This function is the digest of a _make_jsmin() call. It just + utilizes the resulting regexes. It's here for fun and may + vanish any time. Use the `jsmin` function instead. + + :Parameters: + `script` : ``str`` + Script to minify + + `keep_bang_comments` : ``bool`` + Keep comments starting with an exclamation mark? 
(``/*!...*/``) + + :Return: Minified script + :Rtype: ``str`` + """ + if not keep_bang_comments: + rex = ( + r'([^\047"\140/\000-\040]+)|((?:(?:\047[^\047\\\r\n]*(?:\\(?:[^' + r'\r\n]|\r?\n|\r)[^\047\\\r\n]*)*\047)|(?:"[^"\\\r\n]*(?:\\(?:[^' + r'\r\n]|\r?\n|\r)[^"\\\r\n]*)*")|(?:\140[^\140\\]*(?:\\(?:[^\r\n' + r']|\r?\n|\r)[^\140\\]*)*\140))[^\047"\140/\000-\040]*)|(?<=[(,=' + r':\[!&|?{};\r\n+*-])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*' + r'\*+(?:[^/*][^*]*\*+)*/))*(?:(?:(?://[^\r\n]*)?[\r\n])(?:[\000-' + r'\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)*(' + r'(?:/(?![\r\n/*])[^/\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*' + r'(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/\\\[\r\n]*)*/))((?:[\000-\011' + r'\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*(?:(?:(' + r'?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*' + r']*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000-\040&)+,.:;=?\]|}-]))?|' + r'(?<=[\000-#%-,./:-@\[-^\140{-~-]return)(?:[\000-\011\013\014\0' + r'16-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*(?:((?:(?://[^\r' + r'\n]*)?[\r\n]))(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?' + r':[^/*][^*]*\*+)*/))*)*((?:/(?![\r\n/*])[^/\\\[\r\n]*(?:(?:\\[^' + r'\r\n]|(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/\\\[\r' + r'\n]*)*/))((?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/' + r'*][^*]*\*+)*/))*(?:(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013' + r'\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000' + r'-\040&)+,.:;=?\]|}-]))?|(?<=[^\000-!#%&(*,./:-@\[\\^{|~])(?:[' + r'\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)' + r')*(?:((?:(?://[^\r\n]*)?[\r\n]))(?:[\000-\011\013\014\016-\040' + r']|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000-\040"#%-\047' + r')*,./:-@\\-^\140|-~])|(?<=[^\000-#%-,./:-@\[-^\140{-~-])((?:[' + r'\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)' + r'))+(?=[^\000-#%-,./:-@\[-^\140{-~-])|(?<=\+)((?:[\000-\011\013' + r'\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=\+)|(?<' + r'=-)((?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]' + r'*\*+)*/)))+(?=-)|(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*' + r'+(?:[^/*][^*]*\*+)*/))+|(?:(?:(?://[^\r\n]*)?[\r\n])(?:[\000-' + r'\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+' + ) + + def subber(match): + """ Substitution callback """ + groups = match.groups() + return ( + groups[0] or + groups[1] or + (groups[3] and (groups[2] + '\n')) or + groups[2] or + (groups[5] and "%s%s%s" % ( + groups[4] and '\n' or '', + groups[5], + groups[6] and '\n' or '', + )) or + (groups[7] and '\n') or + (groups[8] and ' ') or + (groups[9] and ' ') or + (groups[10] and ' ') or + '' + ) + else: + rex = ( + r'([^\047"\140/\000-\040]+)|((?:(?:\047[^\047\\\r\n]*(?:\\(?:[^' + r'\r\n]|\r?\n|\r)[^\047\\\r\n]*)*\047)|(?:"[^"\\\r\n]*(?:\\(?:[^' + r'\r\n]|\r?\n|\r)[^"\\\r\n]*)*")|(?:\140[^\140\\]*(?:\\(?:[^\r\n' + r']|\r?\n|\r)[^\140\\]*)*\140))[^\047"\140/\000-\040]*)|(?<=[(,=' + r':\[!&|?{};\r\n+*-])((?:[\000-\011\013\014\016-\040]|(?:/\*[^*]' + r'*\*+(?:[^/*][^*]*\*+)*/))*(?:(?:(?://[^\r\n]*)?[\r\n])(?:[\000' + r'-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)*' + r')((?:/(?![\r\n/*])[^/\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n' + r']*(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/\\\[\r\n]*)*/))((?:[\000-\0' + r'11\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*(?:(?' 
+ r':(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:/\*[' + r'^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000-\040&)+,.:;=?\]|}-]))' + r'?|(?<=[\000-#%-,./:-@\[-^\140{-~-]return)((?:[\000-\011\013\01' + r'4\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*(?:((?:(?://[^' + r'\r\n]*)?[\r\n]))(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+' + r'(?:[^/*][^*]*\*+)*/))*)*)((?:/(?![\r\n/*])[^/\\\[\r\n]*(?:(?:' + r'\\[^\r\n]|(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/' + r'\\\[\r\n]*)*/))((?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+' + r'(?:[^/*][^*]*\*+)*/))*(?:(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\01' + r'1\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[' + r'^\000-\040&)+,.:;=?\]|}-]))?|(?<=[^\000-!#%&(*,./:-@\[\\^{|~])' + r'((?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*' + r'+)*/))*(?:(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-' + r'\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+)(?=[^\000-\040"#%' + r'-\047)*,./:-@\\-^\140|-~])|(?<=[^\000-#%-,./:-@\[-^\140{-~-])(' + r'(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+' + r')*/))+)(?=[^\000-#%-,./:-@\[-^\140{-~-])|(?<=\+)((?:[\000-\011' + r'\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))+)(?=\+)' + r'|(?<=-)((?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*]' + r'[^*]*\*+)*/))+)(?=-)|((?:[\000-\011\013\014\016-\040]|(?:/\*[^' + r'*]*\*+(?:[^/*][^*]*\*+)*/))+)|((?:(?:(?://[^\r\n]*)?[\r\n])(?:' + r'[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/' + r'))*)+)' + ) + + keep = _re.compile( + r'[\000-\011\013\014\016-\040]+|(?:/\*(?!!)[^*]*\*+(?:[^/*][^*]*' + r'\*+)*/)+|(?:(?://[^\r\n]*)?[\r\n])+|((?:/\*![^*]*\*+(?:[^/*][^' + r'*]*\*+)*/)+)' + ).sub + keeper = lambda m: m.groups()[0] or '' + + def subber(match): + """ Substitution callback """ + groups = match.groups() + return ( + groups[0] or + groups[1] or + groups[3] and "%s%s%s%s" % ( + keep(keeper, groups[2]), + groups[3], + keep(keeper, groups[4] or ''), + groups[4] and '\n' or '', + ) or + groups[7] and "%s%s%s%s%s" % ( + keep(keeper, groups[5]), + groups[6] and '\n' or '', + groups[7], + keep(keeper, groups[8] or ''), + groups[8] and '\n' or '', + ) or + groups[9] and (keep(keeper, groups[9]) + '\n') or + groups[10] and (keep(keeper, groups[10]) or ' ') or + groups[11] and (keep(keeper, groups[11]) or ' ') or + groups[12] and (keep(keeper, groups[12]) or ' ') or + keep(keeper, groups[13] or groups[14]) + ) + + is_bytes, script = _as_str(script) + script = _re.sub(rex, subber, '\n%s\n' % script).strip() + if is_bytes: + return script.encode('latin-1') + return script + + +if __name__ == '__main__': + def main(): + """ Main """ + import sys as _sys + + argv = _sys.argv[1:] + keep_bang_comments = '-b' in argv or '-bp' in argv or '-pb' in argv + if '-p' in argv or '-bp' in argv or '-pb' in argv: + xjsmin = _make_jsmin(python_only=True) + else: + xjsmin = jsmin + + _sys.stdout.write(xjsmin( + _sys.stdin.read(), keep_bang_comments=keep_bang_comments + )) + + main() diff --git a/pelican/plugins/webassets/vendor/webassets/filter/sass.py b/pelican/plugins/webassets/vendor/webassets/filter/sass.py new file mode 100644 index 0000000..02854d6 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/sass.py @@ -0,0 +1,166 @@ +from __future__ import print_function + +import os + +from webassets.filter import ExternalTool + +__all__ = ('Sass', 'SCSS') + + +class Sass(ExternalTool): + """Converts `Sass `_ markup to + real CSS. + + Requires the Sass executable to be available externally. 
To install + it, you might be able to do:: + + $ sudo npm install -g sass + + By default, this works as an "input filter", meaning ``sass`` is + called for each source file in the bundle. This is because the + path of the source file is required so that @import directives + within the Sass file can be correctly resolved. + + However, it is possible to use this filter as an "output filter", + meaning the source files will first be concatenated, and then the + Sass filter is applied in one go. This can provide a speedup for + bigger projects. + + To use Sass as an output filter:: + + from webassets.filter import get_filter + sass = get_filter('sass', as_output=True) + Bundle(...., filters=(sass,)) + + However, if you want to use the output filter mode and still also + use the @import directive in your Sass files, you will need to + pass along the ``load_paths`` argument, which specifies the path + to which the imports are relative to (this is implemented by + changing the working directory before calling the ``sass`` + executable):: + + sass = get_filter('sass', as_output=True, load_paths='/tmp') + + With ``as_output=True``, the resulting concatenation of the Sass + files is piped to Sass via stdin (``cat ... | sass --stdin ...``) + and may cause applications to not compile if import statements are + given as relative paths. + + For example, if a file ``foo/bar/baz.scss`` imports file + ``foo/bar/bat.scss`` (same directory) and the import is defined as + ``@import "bat";`` then Sass will fail compiling because Sass + has naturally no information on where ``baz.scss`` is located on + disk (since the data was passed via stdin) in order for Sass to + resolve the location of ``bat.scss``:: + + Traceback (most recent call last): + ... + webassets.exceptions.FilterError: sass: subprocess had error: stderr=(sass):1: File to import not found or unreadable: bat. (Sass::SyntaxError) + Load paths: + /path/to/project-foo + on line 1 of standard input + Use --trace for backtrace. + , stdout=, returncode=65 + + To overcome this issue, the full path must be provided in the + import statement, ``@import "foo/bar/bat"``, then webassets + will pass the ``load_paths`` argument (e.g., + ``/path/to/project-foo``) to Sass via its ``-I`` flags so Sass can + resolve the full path to the file to be imported: + ``/path/to/project-foo/foo/bar/bat`` + + Support configuration options: + + SASS_BIN + The path to the Sass binary. If not set, the filter will + try to run ``sass`` as if it's in the system path. + + SASS_STYLE + The style for the output CSS. Can be one of ``expanded`` (default) + or ``compressed``. + + SASS_AS_OUTPUT + By default, this works as an "input filter", meaning ``sass`` is + called for each source file in the bundle. This is because the + path of the source file is required so that @import directives + within the Sass file can be correctly resolved. + + However, it is possible to use this filter as an "output filter", + meaning the source files will first be concatenated, and then the + Sass filter is applied in one go. This can provide a speedup for + bigger projects. + + It will also allow you to share variables between files. + + SASS_LOAD_PATHS + It should be a list of paths relatives to Environment.directory or absolute paths. + Order matters as sass will pick the first file found in path order. + These are fed into the -I flag of the sass command and + is used to control where sass imports code from. 
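+
+    As an illustrative sketch (the directory, file and bundle names are
+    placeholders, not part of this project), these options may be supplied
+    through the environment configuration::
+
+        from webassets import Environment, Bundle
+
+        env = Environment('./static', '/static')
+        env.config['SASS_STYLE'] = 'compressed'
+        env.config['SASS_LOAD_PATHS'] = ['scss/includes']
+        env.register('css', Bundle('scss/site.scss', filters='sass',
+                                   output='gen/site.css'))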
+ """ + # TODO: If an output filter could be passed the list of all input + # files, the filter might be able to do something interesting with + # it (for example, determine that all source files are in the same + # directory). + + name = 'sass' + options = { + 'binary': 'SASS_BIN', + 'use_scss': ('scss', 'SASS_USE_SCSS'), + 'as_output': 'SASS_AS_OUTPUT', + 'load_paths': 'SASS_LOAD_PATHS', + 'style': 'SASS_STYLE', + } + max_debug_level = None + + def resolve_path(self, path): + return self.ctx.resolver.resolve_source(self.ctx, path) + + def _apply_sass(self, _in, out, cd=None): + # Switch to source file directory if asked, so that this directory + # is by default on the load path. We could pass it via -I, but then + # files in the (undefined) wd could shadow the correct files. + orig_cwd = os.getcwd() + child_cwd = orig_cwd + if cd: + child_cwd = cd + + args = [self.binary or 'sass', + '--stdin', + '--style', self.style or 'expanded'] + + if not self.use_scss: + args.append("--indented") + + for path in self.load_paths or []: + if os.path.isabs(path): + abs_path = path + else: + abs_path = self.resolve_path(path) + args.extend(['-I', abs_path]) + + return self.subprocess(args, out, _in, cwd=child_cwd) + + def input(self, _in, out, source_path, output_path, **kw): + if self.as_output: + out.write(_in.read()) + else: + self._apply_sass(_in, out, os.path.dirname(source_path)) + + def output(self, _in, out, **kwargs): + if not self.as_output: + out.write(_in.read()) + else: + self._apply_sass(_in, out) + + +class SCSS(Sass): + """Version of the ``sass`` filter that uses the SCSS syntax. + """ + + name = 'scss' + + def __init__(self, *a, **kw): + assert 'scss' not in kw + kw['scss'] = True + super(SCSS, self).__init__(*a, **kw) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/sass_ruby.py b/pelican/plugins/webassets/vendor/webassets/filter/sass_ruby.py new file mode 100644 index 0000000..63b07b0 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/sass_ruby.py @@ -0,0 +1,225 @@ +from __future__ import print_function +import os, subprocess + +from webassets.filter import ExternalTool +from webassets.cache import FilesystemCache + + +__all__ = ('RubySass', 'RubySCSS') + + +class RubySass(ExternalTool): + """Converts `Sass `_ markup to real CSS. + + This filter uses the legacy ruby Sass compiler, which has been + replaced by the dart version in use in the ``sass`` filter. + + Requires the Sass executable to be available externally. To install + it, you might be able to do:: + + $ sudo gem install sass + + By default, this works as an "input filter", meaning ``sass`` is + called for each source file in the bundle. This is because the + path of the source file is required so that @import directives + within the Sass file can be correctly resolved. + + However, it is possible to use this filter as an "output filter", + meaning the source files will first be concatenated, and then the + Sass filter is applied in one go. This can provide a speedup for + bigger projects. 
+ + To use Sass as an output filter:: + + from webassets.filter import get_filter + sass = get_filter('sass', as_output=True) + Bundle(...., filters=(sass,)) + + However, if you want to use the output filter mode and still also + use the @import directive in your Sass files, you will need to + pass along the ``load_paths`` argument, which specifies the path + to which the imports are relative to (this is implemented by + changing the working directory before calling the ``sass`` + executable):: + + sass = get_filter('sass', as_output=True, load_paths='/tmp') + + With ``as_output=True``, the resulting concatenation of the Sass + files is piped to Sass via stdin (``cat ... | sass --stdin ...``) + and may cause applications to not compile if import statements are + given as relative paths. + + For example, if a file ``foo/bar/baz.scss`` imports file + ``foo/bar/bat.scss`` (same directory) and the import is defined as + ``@import "bat";`` then Sass will fail compiling because Sass + has naturally no information on where ``baz.scss`` is located on + disk (since the data was passed via stdin) in order for Sass to + resolve the location of ``bat.scss``:: + + Traceback (most recent call last): + ... + webassets.exceptions.FilterError: sass: subprocess had error: stderr=(sass):1: File to import not found or unreadable: bat. (Sass::SyntaxError) + Load paths: + /path/to/project-foo + on line 1 of standard input + Use --trace for backtrace. + , stdout=, returncode=65 + + To overcome this issue, the full path must be provided in the + import statement, ``@import "foo/bar/bat"``, then webassets + will pass the ``load_paths`` argument (e.g., + ``/path/to/project-foo``) to Sass via its ``-I`` flags so Sass can + resolve the full path to the file to be imported: + ``/path/to/project-foo/foo/bar/bat`` + + Support configuration options: + + SASS_BIN + The path to the Sass binary. If not set, the filter will + try to run ``sass`` as if it's in the system path. + + SASS_STYLE + The style for the output CSS. Can be one of ``expanded`` (default), + ``nested``, ``compact`` or ``compressed``. + + SASS_DEBUG_INFO + If set to ``True``, will cause Sass to output debug information + to be used by the FireSass Firebug plugin. Corresponds to the + ``--debug-info`` command line option of Sass. + + Note that for this, Sass uses ``@media`` rules, which are + not removed by a CSS compressor. You will thus want to make + sure that this option is disabled in production. + + By default, the value of this option will depend on the + environment ``DEBUG`` setting. + + SASS_LINE_COMMENTS + Passes ``--line-comments`` flag to sass which emit comments in the + generated CSS indicating the corresponding source line. + + Note that this option is disabled by Sass if ``--style compressed`` or + ``--debug-info`` options are provided. + + Enabled by default. To disable, set empty environment variable + ``SASS_LINE_COMMENTS=`` or pass ``line_comments=False`` to this filter. + + SASS_AS_OUTPUT + By default, this works as an "input filter", meaning ``sass`` is + called for each source file in the bundle. This is because the + path of the source file is required so that @import directives + within the Sass file can be correctly resolved. + + However, it is possible to use this filter as an "output filter", + meaning the source files will first be concatenated, and then the + Sass filter is applied in one go. This can provide a speedup for + bigger projects. + + It will also allow you to share variables between files. 
+ + SASS_SOURCE_MAP + If provided, this will generate source maps in the output depending + on the type specified. By default this will use Sass's ``auto``. + Possible values are ``auto``, ``file``, ``inline``, or ``none``. + + SASS_LOAD_PATHS + It should be a list of paths relatives to Environment.directory or absolute paths. + Order matters as sass will pick the first file found in path order. + These are fed into the -I flag of the sass command and + is used to control where sass imports code from. + + SASS_LIBS + It should be a list of paths relatives to Environment.directory or absolute paths. + These are fed into the -r flag of the sass command and + is used to require ruby libraries before running sass. + """ + # TODO: If an output filter could be passed the list of all input + # files, the filter might be able to do something interesting with + # it (for example, determine that all source files are in the same + # directory). + + name = 'sass_ruby' + options = { + 'binary': 'SASS_BIN', + 'use_scss': ('scss', 'SASS_USE_SCSS'), + 'use_compass': ('use_compass', 'SASS_COMPASS'), + 'debug_info': 'SASS_DEBUG_INFO', + 'as_output': 'SASS_AS_OUTPUT', + 'load_paths': 'SASS_LOAD_PATHS', + 'libs': 'SASS_LIBS', + 'style': 'SASS_STYLE', + 'source_map': 'SASS_SOURCE_MAP', + 'line_comments': 'SASS_LINE_COMMENTS', + } + max_debug_level = None + + def resolve_path(self, path): + return self.ctx.resolver.resolve_source(self.ctx, path) + + def _apply_sass(self, _in, out, cd=None): + # Switch to source file directory if asked, so that this directory + # is by default on the load path. We could pass it via -I, but then + # files in the (undefined) wd could shadow the correct files. + orig_cwd = os.getcwd() + child_cwd = orig_cwd + if cd: + child_cwd = cd + + args = [self.binary or 'sass', + '--stdin', + '--style', self.style or 'expanded'] + if self.line_comments is None or self.line_comments: + args.append('--line-comments') + if isinstance(self.ctx.cache, FilesystemCache): + args.extend(['--cache-location', + os.path.join(orig_cwd, self.ctx.cache.directory, 'sass')]) + elif not cd: + # Without a fixed working directory, the location of the cache + # is basically undefined, so prefer not to use one at all. + args.extend(['--no-cache']) + if (self.ctx.environment.debug if self.debug_info is None else self.debug_info): + args.append('--debug-info') + if self.use_scss: + args.append('--scss') + if self.use_compass: + args.append('--compass') + if self.source_map: + args.append('--sourcemap=' + self.source_map) + for path in self.load_paths or []: + if os.path.isabs(path): + abs_path = path + else: + abs_path = self.resolve_path(path) + args.extend(['-I', abs_path]) + for lib in self.libs or []: + if os.path.isabs(lib): + abs_path = lib + else: + abs_path = self.resolve_path(lib) + args.extend(['-r', abs_path]) + + return self.subprocess(args, out, _in, cwd=child_cwd) + + def input(self, _in, out, source_path, output_path, **kw): + if self.as_output: + out.write(_in.read()) + else: + self._apply_sass(_in, out, os.path.dirname(source_path)) + + def output(self, _in, out, **kwargs): + if not self.as_output: + out.write(_in.read()) + else: + self._apply_sass(_in, out) + + +class RubySCSS(RubySass): + """Version of the ``sass`` filter that uses the SCSS syntax. 
+ """ + + name = 'scss_ruby' + + def __init__(self, *a, **kw): + assert not 'scss' in kw + kw['scss'] = True + super(RubySCSS, self).__init__(*a, **kw) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/slimit.py b/pelican/plugins/webassets/vendor/webassets/filter/slimit.py new file mode 100644 index 0000000..0b8fae0 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/slimit.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import +from webassets.filter import Filter + + +__all__ = ('Slimit',) + + +class Slimit(Filter): + """Minifies JS. + + Requires the ``slimit`` package (https://github.com/rspivak/slimit), + which is a JavaScript minifier written in Python. It compiles JavaScript + into more compact code so that it downloads and runs faster. + + It offers mangle and mangle_toplevel options through SLIMIT_MANGLE and SLIMIT_MANGLE_TOPLEVEL + """ + + name = 'slimit' + options = {"mangle": "SLIMIT_MANGLE", "mangle_toplevel": "SLIMIT_MANGLE_TOPLEVEL"} + + def setup(self): + try: + import slimit + except ImportError: + raise EnvironmentError('The "slimit" package is not installed.') + else: + self.slimit = slimit + + def output(self, _in, out, **kw): + out.write(self.slimit.minify(_in.read(), + mangle=self.mangle, mangle_toplevel=self.mangle_toplevel)) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/slimmer.py b/pelican/plugins/webassets/vendor/webassets/filter/slimmer.py new file mode 100644 index 0000000..7e98bfd --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/slimmer.py @@ -0,0 +1,26 @@ +from __future__ import absolute_import + +from webassets.filter import Filter + + +__all__ = ('CSSSlimmer',) + + +class Slimmer(Filter): + + def setup(self): + super(Slimmer, self).setup() + import slimmer + self.slimmer = slimmer + + +class CSSSlimmer(Slimmer): + """Minifies CSS by removing whitespace, comments etc., using the Python + `slimmer `_ library. + """ + + name = 'css_slimmer' + + def output(self, _in, out, **kw): + out.write(self.slimmer.css_slimmer(_in.read())) + diff --git a/pelican/plugins/webassets/vendor/webassets/filter/spritemapper.py b/pelican/plugins/webassets/vendor/webassets/filter/spritemapper.py new file mode 100644 index 0000000..8ee8465 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/spritemapper.py @@ -0,0 +1,125 @@ +from __future__ import print_function +from __future__ import absolute_import +from webassets.six import StringIO +from contextlib import contextmanager +from webassets.filter import Filter + +try: + from spritecss.main import CSSFile + from spritecss.css import CSSParser + from spritecss.css.parser import iter_print_css + from spritecss.config import CSSConfig + from spritecss.mapper import SpriteMapCollector + from spritecss.packing import PackedBoxes, print_packed_size + from spritecss.packing.sprites import open_sprites + from spritecss.stitch import stitch + from spritecss.replacer import SpriteReplacer + +except ImportError: + spritecss_loaded = False + +else: + spritecss_loaded = True + + class FakeCSSFile(CSSFile): + """ + A custom subclass of spritecss.main.CSSFile that accepts CSS input + as string data, instead of requiring that a CSS file be read from + disk. 
+ """ + + def __init__(self, fname, conf=None, data=''): + super(FakeCSSFile, self).__init__(fname, conf=conf) + self.data = StringIO(data) + + @contextmanager + def open_parser(self): + yield CSSParser.read_file(self.data) + + +__all__ = ('Spritemapper',) + + +class Spritemapper(Filter): + """ + Generate CSS spritemaps using + `Spritemapper `_, a Python + utility that merges multiple images into one and generates CSS positioning + for the corresponding slices. Installation is easy:: + + pip install spritemapper + + Supported configuration options: + + SPRITEMAPPER_PADDING + A tuple of integers indicating the number of pixels of padding to + place between sprites + + SPRITEMAPPER_ANNEAL_STEPS + Affects the number of combinations to be attempted by the box packer + algorithm + + **Note:** Since the ``spritemapper`` command-line utility expects source + and output files to be on the filesystem, this filter interfaces directly + with library internals instead. It has been tested to work with + Spritemapper version 1.0. + """ + + name = 'spritemapper' + + def setup(self): + + if not spritecss_loaded: + raise EnvironmentError( + "The spritemapper package could not be found." + ) + + self.options = {} + padding = self.get_config('SPRITEMAPPER_PADDING', require=False) + if padding: + self.options['padding'] = padding + anneal_steps = self.get_config('SPRITEMAPPER_ANNEAL_STEPS', require=False) + if anneal_steps: + self.options['anneal_steps'] = anneal_steps + + def input(self, _in, out, **kw): + + source_path = kw['source_path'] + + # Save the input data for later + css = _in.read() + + # Build config object + conf = CSSConfig(base=self.options, fname=source_path) + + # Instantiate a dummy file instance + cssfile = FakeCSSFile(fname=source_path, conf=conf, data=css) + + # Find spritemaps + smaps = SpriteMapCollector(conf=conf) + smaps.collect(cssfile.map_sprites()) + + # Weed out single-image spritemaps + smaps = [sm for sm in smaps if len(sm) > 1] + + # Generate spritemapped image + # This code is almost verbatim from spritecss.main.spritemap + sm_plcs = [] + for smap in smaps: + with open_sprites(smap, pad=conf.padding) as sprites: + print(("Packing sprites in mapping %s" % (smap.fname,))) + packed = PackedBoxes(sprites, anneal_steps=conf.anneal_steps) + print_packed_size(packed) + sm_plcs.append((smap, packed.placements)) + print(("Writing spritemap image at %s" % (smap.fname,))) + im = stitch(packed) + with open(smap.fname, "wb") as fp: + im.save(fp) + + # Instantiate a fake file instance again + cssfile = FakeCSSFile(fname=source_path, conf=conf, data=css) + + # Output rewritten CSS with spritemapped URLs + replacer = SpriteReplacer(sm_plcs) + for data in iter_print_css(replacer(cssfile)): + out.write(data) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/stylus.py b/pelican/plugins/webassets/vendor/webassets/filter/stylus.py new file mode 100644 index 0000000..63d9205 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/stylus.py @@ -0,0 +1,52 @@ +import os +from webassets.filter import ExternalTool, option + + +__all__ = ('Stylus',) + + +class Stylus(ExternalTool): + """Converts `Stylus `_ markup to CSS. + + Requires the Stylus executable to be available externally. You can install + it using the `Node Package Manager `_:: + + $ npm install -g stylus + + Supported configuration options: + + STYLUS_BIN + The path to the Stylus binary. If not set, assumes ``stylus`` is in the + system path. + + STYLUS_PLUGINS + A Python list of Stylus plugins to use. 
Each plugin will be included + via Stylus's command-line ``--use`` argument. + + STYLUS_EXTRA_ARGS + A Python list of any additional command-line arguments. + + STYLUS_EXTRA_PATHS + A Python list of any additional import paths. + """ + + name = 'stylus' + options = { + 'stylus': 'STYLUS_BIN', + 'plugins': option('STYLUS_PLUGINS', type=list), + 'extra_args': option('STYLUS_EXTRA_ARGS', type=list), + 'extra_paths': option('STYLUS_EXTRA_PATHS', type=list), + } + max_debug_level = None + + def input(self, _in, out, **kwargs): + args = [self.stylus or 'stylus'] + source_dir = os.path.dirname(kwargs['source_path']) + paths = [source_dir] + (self.extra_paths or []) + for path in paths: + args.extend(('--include', path)) + for plugin in self.plugins or []: + args.extend(('--use', plugin)) + if self.extra_args: + args.extend(self.extra_args) + self.subprocess(args, out, _in) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/typescript.py b/pelican/plugins/webassets/vendor/webassets/filter/typescript.py new file mode 100644 index 0000000..bed10ae --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/typescript.py @@ -0,0 +1,60 @@ +import os +import subprocess +import tempfile +from io import open # Give 2 and 3 use same newline behaviour. + +from webassets.filter import Filter +from webassets.exceptions import FilterError + + +__all__ = ('TypeScript',) + + + +class TypeScript(Filter): + """Compile `TypeScript `_ to JavaScript. + + TypeScript is an external tool written for NodeJS. + This filter assumes that the ``tsc`` executable is in the path. Otherwise, you + may define the ``TYPESCRIPT_BIN`` setting. + + To specify TypeScript compiler options, ``TYPESCRIPT_CONFIG`` may be defined. + E.g.: ``--removeComments true --target ES6``. + """ + + name = 'typescript' + max_debug_level = None + options = { + 'binary': 'TYPESCRIPT_BIN', + 'config': 'TYPESCRIPT_CONFIG' + } + + def output(self, _in, out, **kw): + # The typescript compiler cannot read a file which does not have + # the .ts extension. The output file needs to have an extension, + # or the compiler will want to create a directory in its place. + input_filename = tempfile.mktemp() + ".ts" + output_filename = tempfile.mktemp() + ".js" + + with open(input_filename, 'w') as f: + f.write(_in.read()) + + args = [self.binary or 'tsc', '--out', output_filename, input_filename] + if self.config: + args += self.config.split() + proc = subprocess.Popen( + args, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=(os.name == 'nt')) + stdout, stderr = proc.communicate() + if proc.returncode != 0: + raise FilterError("typescript: subprocess had error: stderr=%s," % stderr + + "stdout=%s, returncode=%s" % (stdout, proc.returncode)) + + with open(output_filename, 'r') as f: + out.write(f.read()) + + os.unlink(input_filename) + os.unlink(output_filename) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/uglifyjs.py b/pelican/plugins/webassets/vendor/webassets/filter/uglifyjs.py new file mode 100644 index 0000000..7e35255 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/uglifyjs.py @@ -0,0 +1,32 @@ +from webassets.filter import ExternalTool + + +__all__ = ('UglifyJS',) + + +class UglifyJS(ExternalTool): + """ + Minify Javascript using `UglifyJS `_. + + The filter requires version 2 of UglifyJS. + + UglifyJS is an external tool written for NodeJS; this filter assumes that + the ``uglifyjs`` executable is in the path. 
Otherwise, you may define + a ``UGLIFYJS_BIN`` setting. + + Additional options may be passed to ``uglifyjs`` using the setting + ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings. + """ + + name = 'uglifyjs' + options = { + 'binary': 'UGLIFYJS_BIN', + 'extra_args': 'UGLIFYJS_EXTRA_ARGS', + } + + def output(self, _in, out, **kw): + # UglifyJS 2 doesn't properly read data from stdin (#212). + args = [self.binary or 'uglifyjs', '{input}', '--output', '{output}'] + if self.extra_args: + args.extend(self.extra_args) + self.subprocess(args, out, _in) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/yui.py b/pelican/plugins/webassets/vendor/webassets/filter/yui.py new file mode 100644 index 0000000..0c67de4 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/filter/yui.py @@ -0,0 +1,54 @@ +"""Minify Javascript and CSS with +`YUI Compressor `_. + +YUI Compressor is an external tool written in Java, which needs to be +available. One way to get it is to install the +`yuicompressor `_ package:: + + pip install yuicompressor + +No configuration is necessary in this case. + +You can also get YUI compressor a different way and define +a ``YUI_COMPRESSOR_PATH`` setting that points to the ``.jar`` file. +Otherwise, an environment variable by the same name is tried. The +filter will also look for a ``JAVA_HOME`` environment variable to +run the ``.jar`` file, or will otherwise assume that ``java`` is +on the system path. +""" + +from webassets.filter import JavaTool + + +__all__ = ('YUIJS', 'YUICSS',) + + +class YUIBase(JavaTool): + + def setup(self): + super(YUIBase, self).setup() + + try: + self.jar = self.get_config('YUI_COMPRESSOR_PATH', + what='YUI Compressor') + except EnvironmentError: + raise EnvironmentError( + "\nYUI Compressor jar can't be found." + "\nPlease provide a YUI_COMPRESSOR_PATH setting or an " + "environment variable with the full path to the " + "YUI compressor jar." + ) + + def output(self, _in, out, **kw): + self.subprocess( + ['--charset=utf-8', '--type=%s' % self.mode], out, _in) + + +class YUIJS(YUIBase): + name = 'yui_js' + mode = 'js' + + +class YUICSS(YUIBase): + name = 'yui_css' + mode = 'css' diff --git a/pelican/plugins/webassets/vendor/webassets/importlib.py b/pelican/plugins/webassets/vendor/webassets/importlib.py new file mode 100644 index 0000000..48846f7 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/importlib.py @@ -0,0 +1,38 @@ +# From Python 2.7. + +import sys + +def _resolve_name(name, package, level): + """Return the absolute name of the module to be imported.""" + if not hasattr(package, 'rindex'): + raise ValueError("'package' not set to a string") + dot = len(package) + for x in range(level, 1, -1): + try: + dot = package.rindex('.', 0, dot) + except ValueError: + raise ValueError("attempted relative import beyond top-level " + "package") + return "%s.%s" % (package[:dot], name) + + +def import_module(name, package=None): + """Import a module. + + The 'package' argument is required when performing a relative import. It + specifies the package to use as the anchor point from which to resolve the + relative import to an absolute import. 
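+
+    A hypothetical usage sketch (the package and module names are
+    placeholders)::
+
+        mod = import_module('.bundle', package='mypackage')
+        # roughly equivalent to importing mypackage.bundle and returning
+        # sys.modules['mypackage.bundle']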
+ + """ + if name.startswith('.'): + if not package: + raise TypeError("relative imports require the 'package' argument") + level = 0 + for character in name: + if character != '.': + break + level += 1 + name = _resolve_name(name[level:], package, level) + __import__(name) + return sys.modules[name] + diff --git a/pelican/plugins/webassets/vendor/webassets/loaders.py b/pelican/plugins/webassets/vendor/webassets/loaders.py new file mode 100644 index 0000000..500ab57 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/loaders.py @@ -0,0 +1,338 @@ +"""Loaders are helper classes which will read environments and/or +bundles from a source, like a configuration file. + +This can be used as an alternative to an imperative setup. +""" + +import os, sys +from os import path +import glob, fnmatch +import inspect +import types +from webassets import six +try: + import yaml +except ImportError: + pass + +from webassets import six +from webassets import Environment +from webassets.bundle import Bundle +from webassets.exceptions import EnvironmentError +from webassets.filter import register_filter +from webassets.importlib import import_module + + +__all__ = ('Loader', 'LoaderError', 'PythonLoader', 'YAMLLoader', + 'GlobLoader',) + + + +class LoaderError(Exception): + """Loaders should raise this when they can't deal with a given file. + """ + + +class YAMLLoader(object): + """Will load an environment or a set of bundles from + `YAML `_ files. + """ + + def __init__(self, file_or_filename): + try: + yaml + except NameError: + raise EnvironmentError('PyYAML is not installed') + else: + self.yaml = yaml + self.file_or_filename = file_or_filename + + def _yield_bundle_contents(self, data): + """Yield bundle contents from the given dict. + + Each item yielded will be either a string representing a file path + or a bundle.""" + contents = data.get('contents', []) + if isinstance(contents, six.string_types): + contents = contents, + for content in contents: + if isinstance(content, dict): + content = self._get_bundle(content) + yield content + + def _get_bundle(self, data): + """Return a bundle initialised by the given dict.""" + kwargs = dict( + filters=data.get('filters', None), + output=data.get('output', None), + debug=data.get('debug', None), + extra=data.get('extra', {}), + config=data.get('config', {}), + depends=data.get('depends', None)) + return Bundle(*list(self._yield_bundle_contents(data)), **kwargs) + + def _get_bundles(self, obj, known_bundles=None): + """Return a dict that keys bundle names to bundles.""" + bundles = {} + for key, data in six.iteritems(obj): + if data is None: + data = {} + bundles[key] = self._get_bundle(data) + + # now we need to recurse through the bundles and get any that + # are included in each other. + for bundle_name, bundle in bundles.items(): + # copy contents + contents = list(bundle.contents) + for i, item in enumerate(bundle.contents): + if item in bundles: + contents[i] = bundles[item] + elif known_bundles and item in known_bundles: + contents[i] = known_bundles[item] + # cast back to a tuple + contents = tuple(contents) + if contents != bundle.contents: + bundle.contents = contents + return bundles + + def _open(self): + """Returns a (fileobj, filename) tuple. + + The filename can be False if it is unknown. 
+ """ + if isinstance(self.file_or_filename, six.string_types): + return open(self.file_or_filename), self.file_or_filename + + file = self.file_or_filename + return file, getattr(file, 'name', False) + + @classmethod + def _get_import_resolver(cls): + """ method that can be overridden in tests """ + from zope.dottedname.resolve import resolve as resolve_dotted + return resolve_dotted + + def load_bundles(self, environment=None): + """Load a list of :class:`Bundle` instances defined in the YAML file. + + Expects the following format: + + .. code-block:: yaml + + bundle-name: + filters: sass,cssutils + output: cache/default.css + contents: + - css/jquery.ui.calendar.css + - css/jquery.ui.slider.css + another-bundle: + # ... + + Bundles may reference each other: + + .. code-block:: yaml + + js-all: + contents: + - jquery.js + - jquery-ui # This is a bundle reference + jquery-ui: + contents: jqueryui/*.js + + If an ``environment`` argument is given, it's bundles + may be referenced as well. Note that you may pass any + compatibly dict-like object. + + Finally, you may also use nesting: + + .. code-block:: yaml + + js-all: + contents: + - jquery.js + # This is a nested bundle + - contents: "*.coffee" + filters: coffeescript + + """ + # TODO: Support a "consider paths relative to YAML location, return + # as absolute paths" option? + f, _ = self._open() + try: + obj = self.yaml.safe_load(f) or {} + return self._get_bundles(obj, environment) + finally: + f.close() + + def load_environment(self): + """Load an :class:`Environment` instance defined in the YAML file. + + Expects the following format: + + .. code-block:: yaml + + directory: ../static + url: /media + debug: True + updater: timestamp + filters: + - my_custom_package.my_filter + config: + compass_bin: /opt/compass + another_custom_config_value: foo + + bundles: + # ... + + All values, including ``directory`` and ``url`` are optional. The + syntax for defining bundles is the same as for + :meth:`~.YAMLLoader.load_bundles`. + + Sample usage:: + + from webassets.loaders import YAMLLoader + loader = YAMLLoader('asset.yml') + env = loader.load_environment() + + env['some-bundle'].urls() + """ + f, filename = self._open() + try: + obj = self.yaml.safe_load(f) or {} + + env = Environment() + + # Load environment settings + for setting in ('debug', 'cache', 'versions', 'url_expire', + 'auto_build', 'url', 'directory', 'manifest', 'load_path', + 'cache_file_mode', + # TODO: The deprecated values; remove at some point + 'expire', 'updater'): + if setting in obj: + setattr(env, setting, obj[setting]) + + # Treat the 'directory' option special, make it relative to the + # path of the YAML file, if we know it. 
+ if filename and 'directory' in env.config: + env.directory = path.normpath( + path.join(path.dirname(filename), + env.config['directory'])) + + # Treat the 'filters' option special, it should resolve the + # entries as classes and register them to the environment + if 'filters' in obj: + try: + resolve_dotted = self._get_import_resolver() + except ImportError: + raise EnvironmentError( + "In order to use custom filters in the YAMLLoader " + "you must install the zope.dottedname package") + for filter_class in obj['filters']: + try: + cls = resolve_dotted(filter_class) + except ImportError: + raise LoaderError("Unable to resolve class %s" % filter_class) + if inspect.isclass(cls): + register_filter(cls) + else: + raise LoaderError("Custom filters must be classes " + "not modules or functions") + + # Load custom config options + if 'config' in obj: + env.config.update(obj['config']) + + # Load bundles + bundles = self._get_bundles(obj.get('bundles', {})) + for name, bundle in six.iteritems(bundles): + env.register(name, bundle) + + return env + finally: + f.close() + + +class PythonLoader(object): + """Basically just a simple helper to import a Python file and + retrieve the bundles defined there. + """ + + environment = "environment" + + def __init__(self, module_name): + if isinstance(module_name, types.ModuleType): + self.module = module_name + else: + sys.path.insert(0, '') # Ensure the current directory is on the path + try: + try: + if ":" in module_name: + module_name, env = module_name.split(":") + self.environment = env + self.module = import_module(module_name) + except ImportError as e: + raise LoaderError(e) + finally: + sys.path.pop(0) + + def load_bundles(self): + """Load ``Bundle`` objects defined in the Python module. + + Collects all bundles in the global namespace. + """ + bundles = {} + for name in dir(self.module): + value = getattr(self.module, name) + if isinstance(value, Bundle): + bundles[name] = value + return bundles + + def load_environment(self): + """Load an ``Environment`` defined in the Python module. + + Expects as default a global name ``environment`` to be defined, + or overridden by passing a string ``module:environment`` to the + constructor. + """ + try: + return getattr(self.module, self.environment) + except AttributeError as e: + raise LoaderError(e) + + +def recursive_glob(treeroot, pattern): + """ + From: + http://stackoverflow.com/questions/2186525/2186639#2186639 + """ + results = [] + for base, dirs, files in os.walk(treeroot): + goodfiles = fnmatch.filter(files, pattern) + results.extend(os.path.join(base, f) for f in goodfiles) + return results + + +class GlobLoader(object): + """Base class with some helpers for loaders which need to search + for files. + """ + + def glob_files(self, f, recursive=False): + if isinstance(f, tuple): + return iter(recursive_glob(f[0], f[1])) + else: + return iter(glob.glob(f)) + + def with_file(self, filename, then_run): + """Call ``then_run`` with the file contents. + """ + file = open(filename, 'rb') + try: + contents = file.read() + try: + return then_run(filename, contents) + except LoaderError: + # We can't handle this file. + pass + finally: + file.close() diff --git a/pelican/plugins/webassets/vendor/webassets/merge.py b/pelican/plugins/webassets/vendor/webassets/merge.py new file mode 100644 index 0000000..3d70bff --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/merge.py @@ -0,0 +1,356 @@ +"""Contains the core functionality that manages merging of assets. 
+""" +from __future__ import with_statement +import contextlib + +try: + from urllib.request import Request as URLRequest, urlopen + from urllib.error import HTTPError +except ImportError: + from urllib2 import Request as URLRequest, urlopen + from urllib2 import HTTPError +import logging +from io import open +from webassets import six +from webassets.six.moves import filter + +from .utils import cmp_debug_levels, StringIO, hash_func + + +__all__ = ('FileHunk', 'MemoryHunk', 'merge', 'FilterTool', + 'MoreThanOneFilterError', 'NoFilters') + + +# Log which is used to output low-level information about what the build does. +# This is setup such that it does not output just because the root level +# "webassets" logger is set to level DEBUG (for example via the commandline +# --verbose option). Instead, the messages are only shown when an environment +# variable is set. +# However, we might want to change this in the future. The CLI --verbose option +# could instead just set the level to NOTICE, for example. +log = logging.getLogger('webassets.debug') +log.addHandler(logging.StreamHandler()) +import os +if os.environ.get('WEBASSETS_DEBUG'): + log.setLevel(logging.DEBUG) +else: + log.setLevel(logging.ERROR) + + +class BaseHunk(object): + """Abstract base class. + """ + + def mtime(self): + raise NotImplementedError() + + def id(self): + return hash_func(self.data()) + + def __eq__(self, other): + if isinstance(other, BaseHunk): + # Allow class to be used as a unique dict key. + return hash_func(self) == hash_func(other) + return False + + def data(self): + raise NotImplementedError() + + def save(self, filename): + with open(filename, 'w', encoding='utf-8') as f: + f.write(self.data()) + + +class FileHunk(BaseHunk): + """Exposes a single file through as a hunk. + """ + + def __init__(self, filename): + self.filename = filename + + def __repr__(self): + return '<%s %s>' % (self.__class__.__name__, self.filename) + + def mtime(self): + pass + + def data(self): + f = open(self.filename, 'r', encoding='utf-8') + try: + return f.read() + finally: + f.close() + + +class UrlHunk(BaseHunk): + """Represents a file that is referenced by an Url. + + If an environment is given, it's cache will be used to cache the url + contents, and to access it, as allowed by the etag/last modified headers. 
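+
+    An illustrative sketch (the URL is a placeholder)::
+
+        hunk = UrlHunk('https://example.com/static/lib.js')
+        source = hunk.data()  # fetched on first access, memoized on the instance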
+ """ + + def __init__(self, url, env=None): + self.url = url + self.env = env + + def __repr__(self): + return '<%s %s>' % (self.__class__.__name__, self.url) + + def data(self): + if not hasattr(self, '_data'): + request = URLRequest(self.url) + + # Look in the cache for etag / last modified headers to use + # TODO: "expires" header could be supported + if self.env and self.env.cache: + headers = self.env.cache.get( + ('url', 'headers', self.url)) + if headers: + etag, lmod = headers + if etag: request.add_header('If-None-Match', etag) + if lmod: request.add_header('If-Modified-Since', lmod) + + # Make a request + try: + response = urlopen(request) + except HTTPError as e: + if e.code != 304: + raise + # Use the cached version of the url + self._data = self.env.cache.get(('url', 'contents', self.url)) + else: + with contextlib.closing(response): + data = response.read() + if isinstance(data, six.binary_type): + data = data.decode('utf-8') + self._data = data + + # Cache the info from this request + if self.env and self.env.cache: + self.env.cache.set( + ('url', 'headers', self.url), + (response.headers.get("ETag"), + response.headers.get("Last-Modified"))) + self.env.cache.set(('url', 'contents', self.url), self._data) + return self._data + + +class MemoryHunk(BaseHunk): + """Content that is no longer a direct representation of a source file. It + might have filters applied, and is probably the result of merging multiple + individual source files together. + """ + + def __init__(self, data, files=None): + self._data = data + self.files = files or [] + + def __repr__(self): + # Include a has of the data. We want this during logging, so we + # can see which hunks contain identical content. Because this is + # a question of performance, make sure to log in such a way that + # when logging is disabled, this won't be called, i.e.: don't + # %s-format yourself, let logging do it as needed. + return '<%s %s>' % (self.__class__.__name__, hash_func(self)) + + def mtime(self): + pass + + def data(self): + if hasattr(self._data, 'read'): + return self._data.read() + return self._data + + def save(self, filename): + f = open(filename, 'w', encoding='utf-8') + try: + f.write(self.data()) + finally: + f.close() + + +def merge(hunks, separator=None): + """Merge the given list of hunks, returning a new ``MemoryHunk`` object. + """ + # TODO: combine the list of source files, we'd like to collect them + # The linebreak is important in certain cases for Javascript + # files, like when a last line is a //-comment. + if not separator: + separator = '\n' + return MemoryHunk(separator.join([h.data() for h in hunks])) + + +class MoreThanOneFilterError(Exception): + + def __init__(self, message, filters): + Exception.__init__(self, message) + self.filters = filters + + +class NoFilters(Exception): + pass + + +class FilterTool(object): + """Can apply filters to hunk objects, while using the cache. + + If ``no_cache_read`` is given, then the cache will not be considered for + this operation (though the result will still be written to the cache). + + ``kwargs`` are options that should be passed along to the filters. + """ + + VALID_TRANSFORMS = ('input', 'output',) + VALID_FUNCS = ('open', 'concat',) + + def __init__(self, cache=None, no_cache_read=False, kwargs=None): + self.cache = cache + self.no_cache_read = no_cache_read + self.kwargs = kwargs or {} + + def _wrap_cache(self, key, func): + """Return cache value ``key``, or run ``func``. 
+ """ + if self.cache: + if not self.no_cache_read: + log.debug('Checking cache for key %s', key) + content = self.cache.get(key) + if not content in (False, None): + log.debug('Using cached result for %s', key) + return MemoryHunk(content) + + content = func().getvalue() + if self.cache: + log.debug('Storing result in cache with key %s', key,) + self.cache.set(key, content) + return MemoryHunk(content) + + def apply(self, hunk, filters, type, kwargs=None): + """Apply the given list of filters to the hunk, returning a new + ``MemoryHunk`` object. + + ``kwargs`` are options that should be passed along to the filters. + If ``hunk`` is a file hunk, a ``source_path`` key will automatically + be added to ``kwargs``. + """ + assert type in self.VALID_TRANSFORMS + log.debug('Need to run method "%s" of filters (%s) on hunk %s with ' + 'kwargs=%s', type, filters, hunk, kwargs) + + filters = [f for f in filters if getattr(f, type, None)] + if not filters: # Short-circuit + log.debug('No filters have "%s" methods, returning hunk ' + 'unchanged' % (type,)) + return hunk + + kwargs_final = self.kwargs.copy() + kwargs_final.update(kwargs or {}) + + def func(): + data = StringIO(hunk.data()) + for filter in filters: + log.debug('Running method "%s" of %s with kwargs=%s', + type, filter, kwargs_final) + out = StringIO(u'') # For 2.x, StringIO().getvalue() returns str + getattr(filter, type)(data, out, **kwargs_final) + data = out + data.seek(0) + + return data + + additional_cache_keys = [] + if kwargs_final: + for filter in filters: + additional_cache_keys += filter.get_additional_cache_keys(**kwargs_final) + + # Note that the key used to cache this hunk is different from the key + # the hunk will expose to subsequent merges, i.e. hunk.key() is always + # based on the actual content, and does not match the cache key. The + # latter also includes information about for example the filters used. + # + # It wouldn't have to be this way. Hunk could subsequently expose their + # cache key through hunk.key(). This would work as well, but would be + # an inferior solution: Imagine a source file which receives + # non-substantial changes, in the sense that they do not affect the + # filter output, for example whitespace. If a hunk's key is the cache + # key, such a change would invalidate the caches for all subsequent + # operations on this hunk as well, even though it didn't actually + # change after all. + key = ("hunk", hunk, tuple(filters), type, additional_cache_keys) + return self._wrap_cache(key, func) + + def apply_func(self, filters, type, args, kwargs=None, cache_key=None): + """Apply a filter that is not a "stream in, stream out" transform (i.e. + like the input() and output() filter methods). Instead, the filter + method is given the arguments in ``args`` and should then produce an + output stream. This is used, e.g., for the concat() and open() filter + methods. + + Only one such filter can run per operation. + + ``cache_key`` may be a list of additional values to use as the cache + key, in addition to the default key (the filter and arguments). 
+ """ + assert type in self.VALID_FUNCS + log.debug('Need to run method "%s" of one of the filters (%s) ' + 'with args=%s, kwargs=%s', type, filters, args, kwargs) + + filters = [f for f in filters if getattr(f, type, None)] + if not filters: # Short-circuit + log.debug('No filters have a "%s" method' % type) + raise NoFilters() + + if len(filters) > 1: + raise MoreThanOneFilterError( + 'These filters cannot be combined: %s' % ( + ', '.join([f.name for f in filters])), filters) + + kwargs_final = self.kwargs.copy() + kwargs_final.update(kwargs or {}) + + def func(): + filter = filters[0] + out = StringIO(u'') # For 2.x, StringIO().getvalue() returns str + log.debug('Running method "%s" of %s with args=%s, kwargs=%s', + type, filter, args, kwargs) + getattr(filter, type)(out, *args, **kwargs_final) + return out + + additional_cache_keys = [] + if kwargs_final: + for filter in filters: + additional_cache_keys += filter.get_additional_cache_keys(**kwargs_final) + + key = ("hunk", args, tuple(filters), type, cache_key or [], additional_cache_keys) + return self._wrap_cache(key, func) + + +def merge_filters(filters1, filters2): + """Merge two filter lists into one. + + Duplicate filters are removed. Since filter order is important, the order + of the arguments to this function also matter. Duplicates are always + removed from the second filter set if they exist in the first. + + The result will always be ``filters1``, with additional unique filters + from ``filters2`` appended. Within the context of a hierarchy, you want + ``filters2`` to be the parent. + + This function presumes that all the given filters inherit from ``Filter``, + which properly implements operators to determine duplicate filters. + """ + result = list(filters1[:]) + if filters2: + for f in filters2: + if not f in result: + result.append(f) + return result + + +def select_filters(filters, level): + """Return from the list in ``filters`` those filters which indicate that + they should run for the given debug level. + """ + return [f for f in filters + if f.max_debug_level is None or + cmp_debug_levels(level, f.max_debug_level) <= 0] diff --git a/pelican/plugins/webassets/vendor/webassets/py.typed b/pelican/plugins/webassets/vendor/webassets/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/pelican/plugins/webassets/vendor/webassets/script.py b/pelican/plugins/webassets/vendor/webassets/script.py new file mode 100644 index 0000000..102a421 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/script.py @@ -0,0 +1,582 @@ +from __future__ import print_function +import shutil +import os, sys +import time +import logging + +from webassets.loaders import PythonLoader, YAMLLoader +from webassets.bundle import get_all_bundle_files +from webassets.exceptions import BuildError +from webassets.updater import TimestampUpdater +from webassets.merge import MemoryHunk +from webassets.version import get_manifest +from webassets.cache import FilesystemCache +from webassets.utils import set, StringIO + + +__all__ = ('CommandError', 'CommandLineEnvironment', 'main') + + +# logging has WARNING as default level, for the CLI we want INFO. Set this +# as early as possible, so that user customizations will not be overwritten. +logging.getLogger('webassets.script').setLevel(logging.INFO) + + +class CommandError(Exception): + pass + + +class Command(object): + """Base-class for a command used by :class:`CommandLineEnvironment`. 
+ + Each command being a class opens up certain possibilities with respect to + subclassing and customizing the default CLI. + """ + + def __init__(self, cmd_env): + self.cmd = cmd_env + + def __getattr__(self, name): + # Make stuff from cmd environment easier to access + return getattr(self.cmd, name) + + def __call__(self, *args, **kwargs): + raise NotImplementedError() + + +class BuildCommand(Command): + + def __call__(self, bundles=None, output=None, directory=None, no_cache=None, + manifest=None, production=None): + """Build assets. + + ``bundles`` + A list of bundle names. If given, only this list of bundles + should be built. + + ``output`` + List of (bundle, filename) 2-tuples. If given, only these + bundles will be built, using the custom output filenames. + Cannot be used with ``bundles``. + + ``directory`` + Custom output directory to use for the bundles. The original + basenames defined in the bundle ``output`` attribute will be + used. If the ``output`` of the bundles are pointing to different + directories, they will be offset by their common prefix. + Cannot be used with ``output``. + + ``no_cache`` + If set, a cache (if one is configured) will not be used. + + ``manifest`` + If set, the given manifest instance will be used, instead of + any that might have been configured in the Environment. The value + passed will be resolved through ``get_manifest()``. If this fails, + a file-based manifest will be used using the given value as the + filename. + + ``production`` + If set to ``True``, then :attr:`Environment.debug`` will forcibly + be disabled (set to ``False``) during the build. + """ + + # Validate arguments + if bundles and output: + raise CommandError( + 'When specifying explicit output filenames you must ' + 'do so for all bundles you want to build.') + if directory and output: + raise CommandError('A custom output directory cannot be ' + 'combined with explicit output filenames ' + 'for individual bundles.') + + if production: + # TODO: Reset again (refactor commands to be classes) + self.environment.debug = False + + # TODO: Oh how nice it would be to use the future options stack. + if manifest is not None: + try: + manifest = get_manifest(manifest, env=self.environment) + except ValueError: + manifest = get_manifest( + # abspath() is important, or this will be considered + # relative to Environment.directory. + "file:%s" % os.path.abspath(manifest), + env=self.environment) + self.environment.manifest = manifest + + # Use output as a dict. + if output: + output = dict(output) + + # Validate bundle names + bundle_names = bundles if bundles else (output.keys() if output else []) + for name in bundle_names: + if not name in self.environment: + raise CommandError( + 'I do not know a bundle name named "%s".' % name) + + # Make a list of bundles to build, and the filename to write to. + if bundle_names: + # TODO: It's not ok to use an internal property here. + bundles = [(n,b) for n, b in self.environment._named_bundles.items() + if n in bundle_names] + else: + # Includes unnamed bundles as well. + bundles = [(None, b) for b in self.environment] + + # Determine common prefix for use with ``directory`` option. + if directory: + prefix = os.path.commonprefix( + [os.path.normpath(b.resolve_output()) + for _, b in bundles if b.output]) + # dirname() gives the right value for a single file. + prefix = os.path.dirname(prefix) + + to_build = [] + for name, bundle in bundles: + # TODO: We really should support this. 
This error here + # is just in place of a less understandable error that would + # otherwise occur. + if bundle.is_container and directory: + raise CommandError( + 'A custom output directory cannot currently be ' + 'used with container bundles.') + + # Determine which filename to use, if not the default. + overwrite_filename = None + if output: + overwrite_filename = output[name] + elif directory: + offset = os.path.normpath( + bundle.resolve_output())[len(prefix)+1:] + overwrite_filename = os.path.join(directory, offset) + to_build.append((bundle, overwrite_filename, name,)) + + # Build. + built = [] + for bundle, overwrite_filename, name in to_build: + if name: + # A name is not necessary available of the bundle was + # registered without one. + self.log.info("Building bundle: %s (to %s)" % ( + name, overwrite_filename or bundle.output)) + else: + self.log.info("Building bundle: %s" % bundle.output) + + try: + if not overwrite_filename: + with bundle.bind(self.environment): + bundle.build(force=True, disable_cache=no_cache) + else: + # TODO: Rethink how we deal with container bundles here. + # As it currently stands, we write all child bundles + # to the target output, merged (which is also why we + # create and force writing to a StringIO instead of just + # using the ``Hunk`` objects that build() would return + # anyway. + output = StringIO() + with bundle.bind(self.environment): + bundle.build(force=True, output=output, + disable_cache=no_cache) + if directory: + # Only auto-create directories in this mode. + output_dir = os.path.dirname(overwrite_filename) + if not os.path.exists(output_dir): + os.makedirs(output_dir) + MemoryHunk(output.getvalue()).save(overwrite_filename) + built.append(bundle) + except BuildError as e: + self.log.error("Failed, error was: %s" % e) + if len(built): + self.event_handlers['post_build']() + if len(built) != len(to_build): + return 2 + + +class WatchCommand(Command): + + def __call__(self, loop=None): + """Watch assets for changes. + + ``loop`` + A callback, taking no arguments, to be called once every loop + iteration. Can be useful to integrate the command with other code. + If not specified, the loop will call ``time.sleep()``. + """ + # TODO: This should probably also restart when the code changes. + mtimes = {} + + try: + # Before starting to watch for changes, also recognize changes + # made while we did not run, and apply those immediately. + for bundle in self.environment: + print('Bringing up to date: %s' % bundle.output) + bundle.build(force=False) + + self.log.info("Watching %d bundles for changes..." % + len(self.environment)) + + while True: + changed_bundles = self.check_for_changes(mtimes) + + built = [] + for bundle in changed_bundles: + print("Building bundle: %s ..." % bundle.output, end=' ') + sys.stdout.flush() + try: + bundle.build(force=True) + built.append(bundle) + except BuildError as e: + print("") + print("Failed: %s" % e) + else: + print("done") + + if len(built): + self.event_handlers['post_build']() + + do_end = loop() if loop else time.sleep(0.1) + if do_end: + break + except KeyboardInterrupt: + pass + + def check_for_changes(self, mtimes): + # Do not update original mtimes dict right away, so that we detect + # all bundle changes if a file is in multiple bundles. + _new_mtimes = mtimes.copy() + + changed_bundles = set() + # TODO: An optimization was lost here, skipping a bundle once + # a single file has been found to have changed. Bring back. 
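+        # yield_files_to_watch() pairs each watched file with the bundles to
+        # rebuild when it changes; the value may instead be a callable hook
+        # that returns that set (or True, meaning rebuild every bundle).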
+ for filename, bundles_to_update in self.yield_files_to_watch(): + stat = os.stat(filename) + mtime = stat.st_mtime + if sys.platform == "win32": + mtime -= stat.st_ctime + + if mtimes.get(filename, mtime) != mtime: + if callable(bundles_to_update): + # Hook for when file has changed + try: + bundles_to_update = bundles_to_update() + except EnvironmentError: + # EnvironmentError is what the hooks is allowed to + # raise for a temporary problem, like an invalid config + import traceback + traceback.print_exc() + # Don't update anything, wait for another change + bundles_to_update = set() + + if bundles_to_update is True: + # Indicates all bundles should be rebuilt for the change + bundles_to_update = set(self.environment) + changed_bundles |= bundles_to_update + _new_mtimes[filename] = mtime + _new_mtimes[filename] = mtime + + mtimes.update(_new_mtimes) + return changed_bundles + + def yield_files_to_watch(self): + for bundle in self.environment: + for filename in get_all_bundle_files(bundle): + yield filename, set([bundle]) + + +class CleanCommand(Command): + + def __call__(self): + """Delete generated assets. + """ + self.log.info('Cleaning generated assets...') + for bundle in self.environment: + if not bundle.output: + continue + file_path = bundle.resolve_output(self.environment) + if os.path.exists(file_path): + os.unlink(file_path) + self.log.info("Deleted asset: %s" % bundle.output) + if isinstance(self.environment.cache, FilesystemCache): + shutil.rmtree(self.environment.cache.directory) + + +class CheckCommand(Command): + + def __call__(self): + """Check to see if assets need to be rebuilt. + + A non-zero exit status will be returned if any of the input files are + newer (based on mtime) than their output file. This is intended to be + used in pre-commit hooks. + """ + needsupdate = False + updater = self.environment.updater + if not updater: + self.log.debug('no updater configured, using TimestampUpdater') + updater = TimestampUpdater() + for bundle in self.environment: + self.log.info('Checking asset: %s', bundle.output) + if updater.needs_rebuild(bundle, self.environment): + self.log.info(' needs update') + needsupdate = True + if needsupdate: + sys.exit(-1) + + +class CommandLineEnvironment(object): + """Implements the core functionality for a command line frontend to + ``webassets``, abstracted in a way to allow frameworks to integrate the + functionality into their own tools, for example, as a Django management + command, or a command for ``Flask-Script``. + """ + + def __init__(self, env, log, post_build=None, commands=None): + self.environment = env + self.log = log + self.event_handlers = dict(post_build=lambda: True) + if callable(post_build): + self.event_handlers['post_build'] = post_build + + # Instantiate each command + command_def = self.DefaultCommands.copy() + command_def.update(commands or {}) + self.commands = {} + for name, construct in command_def.items(): + if not construct: + continue + if not isinstance(construct, (list, tuple)): + construct = [construct, (), {}] + self.commands[name] = construct[0]( + self, *construct[1], **construct[2]) + + def __getattr__(self, item): + # Allow method-like access to commands. + if item in self.commands: + return self.commands[item] + raise AttributeError(item) + + def invoke(self, command, args): + """Invoke ``command``, or throw a CommandError. + + This is essentially a simple validation mechanism. Feel free + to call the individual command methods manually. 
+ """ + try: + function = self.commands[command] + except KeyError as e: + raise CommandError('unknown command: %s' % e) + else: + return function(**args) + + # List of commands installed + DefaultCommands = { + 'build': BuildCommand, + 'watch': WatchCommand, + 'clean': CleanCommand, + 'check': CheckCommand + } + + +class GenericArgparseImplementation(object): + """Generic command line utility to interact with an webassets environment. + + This is effectively a reference implementation of a command line utility + based on the ``CommandLineEnvironment`` class. Implementers may find it + feasible to simple base their own command line utility on this, rather than + implementing something custom on top of ``CommandLineEnvironment``. In + fact, if that is possible, you are encouraged to do so for greater + consistency across implementations. + """ + + class WatchCommand(WatchCommand): + """Extended watch command that also looks at the config file itself.""" + + def __init__(self, cmd_env, argparse_ns): + WatchCommand.__init__(self, cmd_env) + self.ns = argparse_ns + + def yield_files_to_watch(self): + for result in WatchCommand.yield_files_to_watch(self): + yield result + # If the config changes, rebuild all bundles + if getattr(self.ns, 'config', None): + yield self.ns.config, self.reload_config + + def reload_config(self): + try: + self.cmd.environment = YAMLLoader(self.ns.config).load_environment() + except Exception as e: + raise EnvironmentError(e) + return True + + + def __init__(self, env=None, log=None, prog=None, no_global_options=False): + try: + import argparse + except ImportError: + raise RuntimeError( + 'The webassets command line now requires the ' + '"argparse" library on Python versions <= 2.6.') + else: + self.argparse = argparse + self.env = env + self.log = log + self._construct_parser(prog, no_global_options) + + def _construct_parser(self, prog=None, no_global_options=False): + self.parser = parser = self.argparse.ArgumentParser( + description="Manage assets.", + prog=prog) + + if not no_global_options: + # Start with the base arguments that are valid for any command. + # XXX: Add those to the subparser? + parser.add_argument("-v", dest="verbose", action="store_true", + help="be verbose") + parser.add_argument("-q", action="store_true", dest="quiet", + help="be quiet") + if self.env is None: + loadenv = parser.add_mutually_exclusive_group() + loadenv.add_argument("-c", "--config", dest="config", + help="read environment from a YAML file") + loadenv.add_argument("-m", "--module", dest="module", + help="read environment from a Python module") + + # Add subparsers. + subparsers = parser.add_subparsers(dest='command') + for command in CommandLineEnvironment.DefaultCommands.keys(): + command_parser = subparsers.add_parser(command) + maker = getattr(self, 'make_%s_parser' % command, False) + if maker: + maker(command_parser) + + @staticmethod + def make_build_parser(parser): + parser.add_argument( + 'bundles', nargs='*', metavar='BUNDLE', + help='Optional bundle names to process. If none are ' + 'specified, then all known bundles will be built.') + parser.add_argument( + '--output', '-o', nargs=2, action='append', + metavar=('BUNDLE', 'FILE'), + help='Build the given bundle, and use a custom output ' + 'file. Can be given multiple times.') + parser.add_argument( + '--directory', '-d', + help='Write built files to this directory, using the ' + 'basename defined by the bundle. Will offset ' + 'the original bundle output paths on their common ' + 'prefix. 
Cannot be used with --output.') + parser.add_argument( + '--no-cache', action='store_true', + help='Do not use a cache that might be configured.') + parser.add_argument( + '--manifest', + help='Write a manifest to the given file. Also supports ' + 'the id:arg format, if you want to use a different ' + 'manifest implementation.') + parser.add_argument( + '--production', action='store_true', + help='Forcably turn off debug mode for the build. This ' + 'only has an effect if debug is set to "merge".') + + def _setup_logging(self, ns): + if self.log: + log = self.log + else: + log = logging.getLogger('webassets.script') + if not log.handlers: + # In theory, this could run multiple times (e.g. tests) + handler = logging.StreamHandler() + log.addHandler(handler) + # Note that setting the level filter at the handler level is + # better than the logger level, since this is "our" handler, + # we create it, for the purposes of having a default output. + # The logger itself the user may be modifying. + handler.setLevel(logging.DEBUG if ns.verbose else ( + logging.WARNING if ns.quiet else logging.INFO)) + return log + + def _setup_assets_env(self, ns, log): + env = self.env + if env is None: + assert not (ns.module and ns.config) + if ns.module: + env = PythonLoader(ns.module).load_environment() + if ns.config: + env = YAMLLoader(ns.config).load_environment() + return env + + def _setup_cmd_env(self, assets_env, log, ns): + return CommandLineEnvironment(assets_env, log, commands={ + 'watch': (GenericArgparseImplementation.WatchCommand, (ns,), {}) + }) + + def _prepare_command_args(self, ns): + # Prepare a dict of arguments cleaned of values that are not + # command-specific, and which the command method would not accept. + args = vars(ns).copy() + for action in self.parser._actions: + dest = action.dest + if dest in args: + del args[dest] + return args + + def run_with_ns(self, ns): + log = self._setup_logging(ns) + env = self._setup_assets_env(ns, log) + if env is None: + raise CommandError( + "Error: No environment given or found. Maybe use -m?") + cmd = self._setup_cmd_env(env, log, ns) + + # Run the selected command + args = self._prepare_command_args(ns) + return cmd.invoke(ns.command, args) + + def run_with_argv(self, argv): + try: + ns = self.parser.parse_args(argv) + except SystemExit as e: + # We do not want the main() function to exit the program. + # See run() instead. + return e.args[0] + + return self.run_with_ns(ns) + + def main(self, argv): + """Parse the given command line. + + The commandline is expected to NOT including what would be sys.argv[0]. + """ + try: + return self.run_with_argv(argv) + except CommandError as e: + print(e) + return 1 + + +def main(argv, env=None): + """Execute the generic version of the command line interface. + + You only need to work directly with ``GenericArgparseImplementation`` if + you desire to customize things. + + If no environment is given, additional arguments will be supported to allow + the user to specify/construct the environment on the command line. 
+ """ + return GenericArgparseImplementation(env).main(argv) + + +def run(): + """Runs the command line interface via ``main``, then exits the process + with a proper return code.""" + sys.exit(main(sys.argv[1:]) or 0) + + +if __name__ == '__main__': + run() diff --git a/pelican/plugins/webassets/vendor/webassets/six.py b/pelican/plugins/webassets/vendor/webassets/six.py new file mode 100644 index 0000000..5e0119a --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/six.py @@ -0,0 +1,417 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2013 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.3.0" + + +# True if we are running on Python 3. +PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) + # This is a bit ugly, but it avoids running this again. 
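+        # The resolved object was stored on the instance above, so removing
+        # the descriptor from the class makes later lookups hit it directly.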
+ delattr(tp, self.name) + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + + +class _MovedItems(types.ModuleType): + """Lazy loading of moved objects""" + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", 
"tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("winreg", "_winreg"), +] +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) +del attr + +moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from webassets.six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" + + _iterkeys = "keys" + _itervalues = "values" + _iteritems = "items" + _iterlists = "lists" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + _iterkeys = "iterkeys" + _itervalues = "itervalues" + _iteritems = "iteritems" + _iterlists = "iterlists" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +def iterkeys(d, **kw): + """Return an iterator over the keys of a dictionary.""" + return iter(getattr(d, _iterkeys)(**kw)) + +def itervalues(d, **kw): + """Return an iterator over the values of a dictionary.""" + return iter(getattr(d, _itervalues)(**kw)) + +def iteritems(d, **kw): + """Return an iterator over the (key, value) pairs of a dictionary.""" + return iter(getattr(d, _iteritems)(**kw)) + +def iterlists(d, **kw): + """Return an iterator over the (key, [values]) pairs of a dictionary.""" + return iter(getattr(d, _iterlists)(**kw)) + + +if PY3: + def b(s): + return s.encode("latin-1") + def u(s): + return s + if sys.version_info[1] <= 1: + def int2byte(i): + return bytes((i,)) + else: + # This is about 2x faster than the implementation above on 3.2+ + int2byte = operator.methodcaller("to_bytes", 1, "big") + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO +else: + def b(s): + return s + def u(s): + return unicode(s, "unicode_escape") + int2byte = chr + def indexbytes(buf, i): + return ord(buf[i]) + def iterbytes(buf): + return (ord(byte) for byte in buf) + import StringIO + StringIO = BytesIO = StringIO.StringIO +_add_doc(b, 
"""Byte literal""") +_add_doc(u, """Text literal""") + + +if PY3: + import builtins + exec_ = getattr(builtins, "exec") + + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + + print_ = getattr(builtins, "print") + del builtins + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + + def print_(*args, **kwargs): + """The new-style print function.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + def write(data): + if not isinstance(data, basestring): + data = str(data) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + +_add_doc(reraise, """Reraise an exception.""") + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + return meta("NewBase", bases, {}) diff --git a/pelican/plugins/webassets/vendor/webassets/test.py b/pelican/plugins/webassets/vendor/webassets/test.py new file mode 100644 index 0000000..359aef5 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/test.py @@ -0,0 +1,154 @@ +"""Helpers for testing webassets. + +This is included in the webassets package because it is useful for testing +external libraries that use webassets (like the flask-assets wrapper). +""" +from __future__ import print_function + +import tempfile +import shutil +import os +from os import path +import time + +from webassets import Environment, Bundle +from webassets.six.moves import map +from webassets.six.moves import zip + + +__all__ = ('TempDirHelper', 'TempEnvironmentHelper',) + + +class TempDirHelper(object): + """Base-class for tests which provides a temporary directory + (which is properly deleted after the test is done), and various + helper methods to do filesystem operations within that directory. + """ + + default_files = {} + + def setup_method(self): + self._tempdir_created = tempfile.mkdtemp() + self.create_files(self.default_files) + + def teardown_method(self): + shutil.rmtree(self._tempdir_created) + + def __enter__(self): + self.setup_method() + return self + + def __exit__(self, type, value, traceback): + self.teardown_method() + + @property + def tempdir(self): + # Use a read-only property here, so the user is + # less likely to modify the attribute, and have + # his data deleted on teardown. 
+ return self._tempdir_created + + def create_files(self, files): + """Helper that allows to quickly create a bunch of files in + the media directory of the current test run. + """ + import codecs + # Allow passing a list of filenames to create empty files + if not hasattr(files, 'items'): + files = dict(map(lambda n: (n, ''), files)) + for name, data in files.items(): + dirs = path.dirname(self.path(name)) + if not path.exists(dirs): + os.makedirs(dirs) + f = codecs.open(self.path(name), 'w', 'utf-8') + f.write(data) + f.close() + + def create_directories(self, *dirs): + """Helper to create directories within the media directory + of the current test's environment. + """ + result = [] + for dir in dirs: + full_path = self.path(dir) + result.append(full_path) + os.makedirs(full_path) + return result + + def exists(self, name): + """Ensure the given file exists within the current test run's + media directory. + """ + return path.exists(self.path(name)) + + def get(self, name): + """Return the given file's contents. + """ + with open(self.path(name)) as f: + r = f.read() + print(repr(r)) + return r + + def unlink(self, name): + os.unlink(self.path(name)) + + def path(self, name): + """Return the given file's full path.""" + return path.join(self._tempdir_created, name) + + def setmtime(self, *files, **kwargs): + """Set the mtime of the given files. Useful helper when + needing to test things like the timestamp updater. + + Specify ``mtime`` as a keyword argument, or time.time() + will automatically be used. Returns the mtime used. + + Specify ``mod`` as a keyword argument, and the modifier + will be added to the ``mtime`` used. + """ + mtime = kwargs.pop('mtime', time.time()) + mtime += kwargs.pop('mod', 0) + assert not kwargs, "Unsupported kwargs: %s" % ', '.join(kwargs.keys()) + for f in files: + os.utime(self.path(f), (mtime, mtime)) + return mtime + + def p(self, *files): + """Print the contents of the given files to stdout; useful + for some quick debugging. + """ + if not files: + files = ['out'] # This is a often used output filename + for f in files: + content = self.get(f) + print(f) + print("-" * len(f)) + print(repr(content)) + print(content) + print() + + +class TempEnvironmentHelper(TempDirHelper): + """Base-class for tests which provides a pre-created + environment, based in a temporary directory, and utility + methods to do filesystem operations within that directory. + """ + + default_files = {'in1': 'A', 'in2': 'B', 'in3': 'C', 'in4': 'D'} + + def setup_method(self): + TempDirHelper.setup_method(self) + + self.env = self._create_environment() + # Unless we explicitly test it, we don't want to use the cache + # during testing. + self.env.cache = False + self.env.manifest = False + + def _create_environment(self): + return Environment(self._tempdir_created, '') + + def mkbundle(self, *a, **kw): + b = Bundle(*a, **kw) + b.env = self.env + return b diff --git a/pelican/plugins/webassets/vendor/webassets/updater.py b/pelican/plugins/webassets/vendor/webassets/updater.py new file mode 100644 index 0000000..0153cc7 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/updater.py @@ -0,0 +1,192 @@ +"""The auto-rebuild system is an optional part of webassets that can be used +during development, and can also be quite convenient on small sites that don't +have the performance requirements where a rebuild-check on every request is +fatal. + +This module contains classes that help determine whether a rebuild is required +for a bundle. 
This is more complicated than simply comparing the timestamps of +the source and output files. + +First, certain filters, in particular CSS compilers like SASS, allow bundle +source files to reference additional files which the user may not have listed +in the bundle definition. The bundles support an additional ``depends`` +argument that can list files that should be watched for modification. + +Second, if the bundle definition itself changes, i.e., source files being added +or removed, or the list of applied filters modified, the bundle needs to be +rebuilt also. Since there is no single fixed place where bundles are defined, +simply watching the timestamp of that bundle definition file is not good enough. + +To solve the latter problem, we employ an environment-specific cache of bundle +definitions. + +Note that there is no ``HashUpdater``. This doesn't make sense for two reasons. +First, for a live system, it isn't fast enough. Second, for prebuilding assets, +the cache is a superior solution for getting essentially the same speed +increase as using the hash to reliably determine which bundles to skip. +""" + +from webassets import six +from webassets.six.moves import map +from webassets.six.moves import zip +from webassets.exceptions import BundleError, BuildError +from webassets.utils import RegistryMetaclass, is_url, hash_func + + +__all__ = ('get_updater', 'SKIP_CACHE', + 'TimestampUpdater', 'AlwaysUpdater',) + + +SKIP_CACHE = object() +"""An updater can return this value as hint that a cache, if enabled, +should probably not be used for the rebuild; This is currently used +as a return value when a bundle's dependencies have changed, which +would currently not cause a different cache key to be used. + +This is marked a hint, because in the future, the bundle may be smart +enough to make this decision by itself. +""" + + +class BaseUpdater(six.with_metaclass(RegistryMetaclass( + clazz=lambda: BaseUpdater, attribute='needs_rebuild', + desc='an updater implementation'))): + """Base updater class. + + Child classes that define an ``id`` attribute are accessible via their + string id in the configuration. + + A single instance can be used with different environments. + """ + + def needs_rebuild(self, bundle, ctx): + """Returns ``True`` if the given bundle needs to be rebuilt, + ``False`` otherwise. + """ + raise NotImplementedError() + + def build_done(self, bundle, ctx): + """This will be called once a bundle has been successfully built. + """ + + +get_updater = BaseUpdater.resolve + + +class BundleDefUpdater(BaseUpdater): + """Supports the bundle definition cache update check that child + classes are usually going to want to use also. + """ + + def check_bundle_definition(self, bundle, ctx): + if not ctx.cache: + # If no global cache is configured, we could always + # fall back to a memory-cache specific for the rebuild + # process (store as env._update_cache); however, + # whenever a bundle definition changes, it's likely that + # a process restart will be required also, so in most cases + # this would make no sense. + return False + + cache_key = ('bdef', bundle.output) + current_hash = "%s" % hash_func(bundle) + cached_hash = ctx.cache.get(cache_key) + # This may seem counter-intuitive, but if no cache entry is found + # then we actually return "no update needed". This is because + # otherwise if no cache / a dummy cache is used, then we would be + # rebuilding every single time. 
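+        # Rebuild only if a definition hash was cached earlier and it no
+        # longer matches the current one.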
+ if not cached_hash is None: + return cached_hash != current_hash + return False + + def needs_rebuild(self, bundle, ctx): + return self.check_bundle_definition(bundle, ctx) + + def build_done(self, bundle, ctx): + if not ctx.cache: + return False + cache_key = ('bdef', bundle.output) + cache_value = "%s" % hash_func(bundle) + ctx.cache.set(cache_key, cache_value) + + +class TimestampUpdater(BundleDefUpdater): + + id = 'timestamp' + + def check_timestamps(self, bundle, ctx, o_modified=None): + from .bundle import Bundle + from webassets.version import TimestampVersion + + if not o_modified: + try: + resolved_output = bundle.resolve_output(ctx) + except BundleError: + # This exception will occur when the bundle output has + # placeholder, but a version cannot be found. If the + # user has defined a manifest, this will just be the first + # build. Return True to let it happen. + # However, if no manifest is defined, raise an error, + # because otherwise, this updater would always return True, + # and thus not do its job at all. + if ctx.manifest is None: + raise BuildError(( + '%s uses a version placeholder, and you are ' + 'using "%s" versions. To use automatic ' + 'building in this configuration, you need to ' + 'define a manifest.' % (bundle, ctx.versions))) + return True + + try: + o_modified = TimestampVersion.get_timestamp(resolved_output) + except OSError: + # If the output file does not exist, we'll have to rebuild + return True + + # Recurse through the bundle hierarchy. Check the timestamp of all + # the bundle source files, as well as any additional + # dependencies that we are supposed to watch. + from webassets.bundle import wrap + for iterator, result in ( + (lambda e: map(lambda s: s[1], bundle.resolve_contents(e)), True), + (bundle.resolve_depends, SKIP_CACHE) + ): + for item in iterator(ctx): + if isinstance(item, Bundle): + nested_result = self.check_timestamps(item, wrap(ctx, item), o_modified) + if nested_result: + return nested_result + elif not is_url(item): + try: + s_modified = TimestampVersion.get_timestamp(item) + except OSError: + # If a file goes missing, always require + # a rebuild. + return result + else: + if s_modified > o_modified: + return result + return False + + def needs_rebuild(self, bundle, ctx): + return \ + super(TimestampUpdater, self).needs_rebuild(bundle, ctx) or \ + self.check_timestamps(bundle, ctx) + + def build_done(self, bundle, ctx): + # Reset the resolved dependencies, so any globs will be + # re-resolved the next time we check if a rebuild is + # required. This ensures that we begin watching new files + # that are created, while still caching the globs as long + # no changes happen. 
+ bundle._resolved_depends = None + super(TimestampUpdater, self).build_done(bundle, ctx) + + +class AlwaysUpdater(BaseUpdater): + + id = 'always' + + def needs_rebuild(self, bundle, ctx): + return True + diff --git a/pelican/plugins/webassets/vendor/webassets/utils.py b/pelican/plugins/webassets/vendor/webassets/utils.py new file mode 100644 index 0000000..985f5ce --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/utils.py @@ -0,0 +1,249 @@ +from webassets import six +import contextlib +import os +import sys +import re +from itertools import takewhile + +from .exceptions import BundleError + + +__all__ = ('md5_constructor', 'pickle', 'set', 'StringIO', + 'common_path_prefix', 'working_directory', 'is_url') + + +import base64 + +if sys.version_info >= (2, 5): + import hashlib + md5_constructor = hashlib.md5 +else: + import md5 + md5_constructor = md5.new + + +try: + import cPickle as pickle +except ImportError: + import pickle + + +try: + set +except NameError: + from sets import Set as set +else: + set = set + + +try: + FileNotFoundError +except NameError: + FileNotFoundError = IOError +else: + FileNotFoundError = FileNotFoundError + + +from webassets.six import StringIO + + +try: + from urllib import parse as urlparse +except ImportError: # Python 2 + import urlparse + import urllib + +def hash_func(data): + from .cache import make_md5 + return make_md5(data) + + +_directory_separator_re = re.compile(r"[/\\]+") + + +def common_path_prefix(paths, sep=os.path.sep): + """os.path.commonpath() is completely in the wrong place; it's + useless with paths since it only looks at one character at a time, + see http://bugs.python.org/issue10395 + + This replacement is from: + http://rosettacode.org/wiki/Find_Common_Directory_Path#Python + """ + def allnamesequal(name): + return all(n==name[0] for n in name[1:]) + + # The regex splits the paths on both / and \ characters, whereas the + # rosettacode.org algorithm only uses os.path.sep + bydirectorylevels = zip(*[_directory_separator_re.split(p) for p in paths]) + return sep.join(x[0] for x in takewhile(allnamesequal, bydirectorylevels)) + + +@contextlib.contextmanager +def working_directory(directory=None, filename=None): + """A context manager which changes the working directory to the given + path, and then changes it back to its previous value on exit. + + Filters will often find this helpful. + + Instead of a ``directory``, you may also give a ``filename``, and the + working directory will be set to the directory that file is in.s + """ + assert bool(directory) != bool(filename) # xor + if not directory: + directory = os.path.dirname(filename) + prev_cwd = os.getcwd() + os.chdir(directory) + try: + yield + finally: + os.chdir(prev_cwd) + + +def make_option_resolver(clazz=None, attribute=None, classes=None, + allow_none=True, desc=None): + """Returns a function which can resolve an option to an object. + + The option may given as an instance or a class (of ``clazz``, or + duck-typed with an attribute ``attribute``), or a string value referring + to a class as defined by the registry in ``classes``. + + This support arguments, so an option may look like this: + + cache:/tmp/cachedir + + If this must instantiate a class, it will pass such an argument along, + if given. In addition, if the class to be instantiated has a classmethod + ``make()``, this method will be used as a factory, and will be given an + Environment object (if one has been passed to the resolver). 
This allows + classes that need it to initialize themselves based on an Environment. + """ + assert clazz or attribute or classes + desc_string = ' to %s' % desc if desc else None + + def instantiate(clazz, env, *a, **kw): + # Create an instance of clazz, via the Factory if one is defined, + # passing along the Environment, or creating the class directly. + if hasattr(clazz, 'make'): + # make() protocol is that if e.g. the get_manifest() resolver takes + # an env, then the first argument of the factory is the env. + args = (env,) + a if env is not None else a + return clazz.make(*args, **kw) + return clazz(*a, **kw) + + def resolve_option(option, env=None): + the_clazz = clazz() if callable(clazz) and not isinstance(option, type) else clazz + + if not option and allow_none: + return None + + # If the value has one of the support attributes (duck-typing). + if attribute and hasattr(option, attribute): + if isinstance(option, type): + return instantiate(option, env) + return option + + # If it is the class we support. + if the_clazz and isinstance(option, the_clazz): + return option + elif isinstance(option, type) and issubclass(option, the_clazz): + return instantiate(option, env) + + # If it is a string + elif isinstance(option, six.string_types): + parts = option.split(':', 1) + key = parts[0] + arg = parts[1] if len(parts) > 1 else None + if key in classes: + return instantiate(classes[key], env, *([arg] if arg else [])) + + raise ValueError('%s cannot be resolved%s' % (option, desc_string)) + resolve_option.__doc__ = """Resolve ``option``%s.""" % desc_string + + return resolve_option + + +def RegistryMetaclass(clazz=None, attribute=None, allow_none=True, desc=None): + """Returns a metaclass which will keep a registry of all subclasses, keyed + by their ``id`` attribute. + + The metaclass will also have a ``resolve`` method which can turn a string + into an instance of one of the classes (based on ``make_option_resolver``). + """ + def eq(self, other): + """Return equality with config values that instantiate this.""" + return (hasattr(self, 'id') and self.id == other) or\ + id(self) == id(other) + def unicode(self): + return "%s" % (self.id if hasattr(self, 'id') else repr(self)) + + class Metaclass(type): + REGISTRY = {} + + def __new__(mcs, name, bases, attrs): + if not '__eq__' in attrs: + attrs['__eq__'] = eq + if not '__unicode__' in attrs: + attrs['__unicode__'] = unicode + if not '__str__' in attrs: + attrs['__str__'] = unicode + new_klass = type.__new__(mcs, name, bases, attrs) + if hasattr(new_klass, 'id'): + mcs.REGISTRY[new_klass.id] = new_klass + return new_klass + + resolve = staticmethod(make_option_resolver( + clazz=clazz, + attribute=attribute, + allow_none=allow_none, + desc=desc, + classes=REGISTRY + )) + return Metaclass + + +def cmp_debug_levels(level1, level2): + """cmp() for debug levels, returns True if ``level1`` is higher + than ``level2``.""" + level_ints = {False: 0, 'merge': 1, True: 2} + try: + cmp = lambda a, b: (a > b) - (a < b) # 333 + return cmp(level_ints[level1], level_ints[level2]) + except KeyError as e: + # Not sure if a dependency on BundleError is proper here. Validating + # debug values should probably be done on assign. But because this + # needs to happen in two places (Environment and Bundle) we do it here. 
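+        # Re-raise the unknown level as a BundleError so the caller sees
+        # which debug value was invalid.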
+ raise BundleError('Invalid debug value: %s' % e) + + +def is_url(s): + if not isinstance(s, str): + return False + parsed = urlparse.urlsplit(s) + return bool(parsed.scheme and parsed.netloc) and len(parsed.scheme) > 1 + + +def calculate_sri(data): + """Calculate SRI string for data buffer.""" + hash = hashlib.sha384() + hash.update(data) + hash = hash.digest() + hash_base64 = base64.b64encode(hash).decode() + return 'sha384-{}'.format(hash_base64) + + +def calculate_sri_on_file(file_name): + """Calculate SRI string if file can be found. Otherwise silently return None""" + BUF_SIZE = 65536 + hash = hashlib.sha384() + try: + with open(file_name, 'rb') as f: + while True: + data = f.read(BUF_SIZE) + if not data: + break + hash.update(data) + hash = hash.digest() + hash_base64 = base64.b64encode(hash).decode() + return 'sha384-{}'.format(hash_base64) + except FileNotFoundError: + return None diff --git a/pelican/plugins/webassets/vendor/webassets/version.py b/pelican/plugins/webassets/vendor/webassets/version.py new file mode 100644 index 0000000..5bb7160 --- /dev/null +++ b/pelican/plugins/webassets/vendor/webassets/version.py @@ -0,0 +1,323 @@ +"""This module defines the Version classes, and the related Manifest +implementations. +""" + +from __future__ import with_statement + +import os +import pickle +from webassets import six + +from webassets.merge import FileHunk +from webassets.utils import md5_constructor, RegistryMetaclass, is_url + + +__all__ = ('get_versioner', 'VersionIndeterminableError', + 'Version', 'TimestampVersion', + 'get_manifest', 'HashVersion', 'Manifest', 'FileManifest',) + + +class VersionIndeterminableError(Exception): + pass + + +class Version(six.with_metaclass(RegistryMetaclass( + clazz=lambda: Version, attribute='determine_version', + desc='a version implementation'))): + """A Version class that can be assigned to the ``Environment.versioner`` + attribute. + + Given a bundle, this must determine its "version". This version can then + be used in the output filename of the bundle, or appended to the url as a + query string, in order to expire cached assets. + + A version could be a timestamp, a content hash, or a git revision etc. + + As a user, all you need to care about, in most cases, is whether you want + to set the ``Environment.versioner`` attribute to ``hash`` or ``timestamp``. + + A single instance can be used with different environments. + """ + + def determine_version(self, bundle, ctx, hunk=None): + """Return a string that represents the current version of the given + bundle. + + This method is called on two separate occasions: + + 1) After a bundle has been built and is about to be saved. If the + output filename contains a placeholder, this method is asked for the + version. This mode is indicated by the ``hunk`` argument being + available. + + 2) When a version is required for an already built file, either + because: + + *) An URL needs to be constructed. + *) It needs to be determined if a bundle needs an update. + + *This will only occur* if *no manifest* is used. If there is a + manifest, it would be used to determine the version instead. + + Support for option (2) is optional. If not supported, then in those + cases a manifest needs to be configured. ``VersionIndeterminableError`` + should be raised with a message why. + """ + raise NotImplementedError() + + def set_version(self, bundle, ctx, filename, version): + """Hook called after a bundle has been built. Some version classes + may need this. 
+ """ + + +get_versioner = Version.resolve + + +class TimestampVersion(Version): + """Uses the most recent 'last modified' timestamp of all source files + as the version. + + Uses second-precision. + """ + + id = 'timestamp' + + def determine_version(self, bundle, ctx, hunk=None): + # Only look at an existing output file if we are not about to + # overwrite it with a new version. But if we can, simply using the + # timestamp of the final file is the fastest way to do this. + # Note that this works because of our ``save_done`` hook. + if not hunk: + from webassets.bundle import has_placeholder + if not has_placeholder(bundle.output): + return self.get_timestamp(bundle.resolve_output(ctx)) + + # If we need the timestamp for the file we just built (hunk!=None), + # or if we need the timestamp for a bundle with a placeholder, + # the way to get it is by looking at the source files. + try: + return self.find_recent_most_timestamp(bundle, ctx) + except OSError: + # Source files are missing. Under these circumstances, we cannot + # return a proper version. + assert hunk is None + raise VersionIndeterminableError( + 'source files are missing and output target has a ' + 'placeholder') + + def set_version(self, bundle, ctx, filename, version): + # Update the mtime of the newly created file with the version + os.utime(filename, (-1, version)) + + @classmethod + def get_timestamp(cls, filename): + return int(os.stat(filename).st_mtime) # Let OSError pass + + @classmethod + def find_recent_most_timestamp(cls, bundle, ctx): + from webassets.bundle import get_all_bundle_files + # Recurse through the bundle hierarchy. Check the timestamp of all + # the bundle source files, as well as any additional + # dependencies that we are supposed to watch. + most_recent = None + for filename in get_all_bundle_files(bundle, ctx): + if is_url(filename): + continue + timestamp = cls.get_timestamp(filename) + if most_recent is None or timestamp > most_recent: + most_recent = timestamp + return most_recent + + +class HashVersion(Version): + """Uses the MD5 hash of the content as the version. + + By default, only the first 8 characters of the hash are used, which + should be sufficient. This can be changed by passing the appropriate + ``length`` value to ``__init__`` (or ``None`` to use the full hash). + + You can also customize the hash used by passing the ``hash`` argument. + All constructors from ``hashlib`` are supported. + """ + + id = 'hash' + + @classmethod + def make(cls, length=None): + args = [int(length)] if length else [] + return cls(*args) + + def __init__(self, length=8, hash=md5_constructor): + self.length = length + self.hasher = hash + + def determine_version(self, bundle, ctx, hunk=None): + if not hunk: + from webassets.bundle import has_placeholder + if not has_placeholder(bundle.output): + hunk = FileHunk(bundle.resolve_output(ctx)) + else: + # Can cannot determine the version of placeholder files. + raise VersionIndeterminableError( + 'output target has a placeholder') + + hasher = self.hasher() + hasher.update(hunk.data().encode('utf-8')) + return hasher.hexdigest()[:self.length] + + +class Manifest(six.with_metaclass(RegistryMetaclass( + clazz=lambda: Manifest, desc='a manifest implementation'))): + """Persists information about the versions bundles are at. + + The Manifest plays a role only if you insert the bundle version in your + output filenames, or append the version as a querystring to the url (via + the url_expire option). 
It serves two purposes: + + - Without a manifest, it may be impossible to determine the version + at runtime. In a deployed app, the media files may be stored on + a different server entirely, and be inaccessible from the application + code. The manifest, if shipped with your application, is what still + allows to construct the proper URLs. + + - Even if it were possible to determine the version at runtime without + a manifest, it may be a costly process, and using a manifest may + give you better performance. If you use a hash-based version for + example, this hash would need to be recalculated every time a new + process is started. (*) + + (*) It needs to happen only once per process, because Bundles are smart + enough to cache their own version in memory. + + A special case is the ``Environment.auto_build`` option. A manifest + implementation should re-read its data from its out-of-process data + source on every request, if ``auto_build`` is enabled. Otherwise, if your + application is served by multiple processes, then after an automatic + rebuild in one process all other processes would continue to serve an old + version of the file (or attach an old version to the query string). + + A manifest instance is currently not guaranteed to function correctly + with multiple Environment instances. + """ + + def remember(self, bundle, ctx, version): + raise NotImplementedError() + + def query(self, bundle, ctx): + raise NotImplementedError() + + +get_manifest = Manifest.resolve + + +class FileManifest(Manifest): + """Stores version data in a single file. + + Uses Python's pickle module to store a dict data structure. You should + only use this when the manifest is read-only in production, since it is + not multi-process safe. If you use ``auto_build`` in production, use + ``CacheManifest`` instead. + + By default, the file is named ".webassets-manifest" and stored in + ``Environment.directory``. + """ + + id = 'file' + + @classmethod + def make(cls, ctx, filename=None): + if not filename: + filename = '.webassets-manifest' + return cls(os.path.join(ctx.directory, filename)) + + def __init__(self, filename): + self.filename = filename + self._load_manifest() + + def remember(self, bundle, ctx, version): + self.manifest[bundle.output] = version + self._save_manifest() + + def query(self, bundle, ctx): + if ctx.auto_build: + self._load_manifest() + return self.manifest.get(bundle.output, None) + + def _load_manifest(self): + if os.path.exists(self.filename): + with open(self.filename, 'rb') as f: + self.manifest = pickle.load(f) + else: + self.manifest = {} + + def _save_manifest(self): + with open(self.filename, 'wb') as f: + pickle.dump(self.manifest, f, protocol=2) + + +class JsonManifest(FileManifest): + """Same as ``FileManifest``, but uses JSON instead of pickle.""" + + id = 'json' + + def __init__(self, *a, **kw): + try: + import json + except ImportError: + import simplejson as json + self.json = json + super(JsonManifest, self).__init__(*a, **kw) + + def _load_manifest(self): + if os.path.exists(self.filename): + with open(self.filename, 'r') as f: + self.manifest = self.json.load(f) + else: + self.manifest = {} + + def _save_manifest(self): + with open(self.filename, 'w') as f: + self.json.dump(self.manifest, f, indent=4, sort_keys=True) + + +class CacheManifest(Manifest): + """Stores version data in the webassets cache. 
+ + Since this has bad portability (you hardly want to copy your cache between + machines), this only makes sense when you are building on the same machine + where you're application code runs. + + When you are using ``auto_build`` in production, this is exactly what you + want to use, since it is multi-process safe. + """ + + id = 'cache' + + def _check(self, ctx): + if not ctx.cache: + raise EnvironmentError( + 'You are using the cache manifest, but have not ' + 'enabled the cache.') + + def remember(self, bundle, ctx, version): + self._check(ctx) + ctx.cache.set(('manifest', bundle.output), version) + + def query(self, bundle, ctx): + self._check(ctx) + return ctx.cache.get(('manifest', bundle.output)) + + +class SymlinkManifest(Manifest): + """Creates a symlink to the actual file. + + E.g. compressed-current.js -> compressed-1ebcdc5.js + """ + + # Implementation notes: Would presumably be Linux only initially, + # could clean up after itself, may be hard to implement and maybe + # shouldn't, would only we usable to resolve placeholders in filenames. + + def __init__(self): + raise NotImplementedError() # TODO From 24f5bad837b7e7b45a95bbde9f80f6be44464546 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:10:54 -0700 Subject: [PATCH 02/23] vendor: rewrite the imports in webassets --- .../webassets/vendor/webassets/bundle.py | 6 +- .../webassets/vendor/webassets/cache.py | 8 +- .../plugins/webassets/vendor/webassets/env.py | 329 +++++++++++------- .../webassets/vendor/webassets/ext/jinja2.py | 6 +- .../vendor/webassets/filter/__init__.py | 12 +- .../vendor/webassets/filter/autoprefixer.py | 4 +- .../vendor/webassets/filter/babel.py | 2 +- .../vendor/webassets/filter/cleancss.py | 2 +- .../vendor/webassets/filter/clevercss.py | 2 +- .../vendor/webassets/filter/closure.py | 2 +- .../webassets/filter/closure_stylesheets.py | 2 +- .../webassets/filter/closure_templates.py | 4 +- .../vendor/webassets/filter/coffeescript.py | 4 +- .../vendor/webassets/filter/compass.py | 6 +- .../vendor/webassets/filter/cssmin.py | 2 +- .../vendor/webassets/filter/cssprefixer.py | 2 +- .../webassets/filter/cssrewrite/__init__.py | 4 +- .../webassets/filter/cssrewrite/base.py | 4 +- .../vendor/webassets/filter/cssutils.py | 2 +- .../vendor/webassets/filter/datauri.py | 4 +- .../webassets/vendor/webassets/filter/dust.py | 2 +- .../vendor/webassets/filter/handlebars.py | 4 +- .../webassets/vendor/webassets/filter/jade.py | 4 +- .../vendor/webassets/filter/jinja2.py | 2 +- .../vendor/webassets/filter/jsmin.py | 2 +- .../webassets/filter/jspacker/__init__.py | 2 +- .../webassets/vendor/webassets/filter/jst.py | 4 +- .../webassets/vendor/webassets/filter/less.py | 4 +- .../vendor/webassets/filter/less_ruby.py | 4 +- .../vendor/webassets/filter/libsass.py | 2 +- .../vendor/webassets/filter/node_sass.py | 2 +- .../vendor/webassets/filter/postcss.py | 4 +- .../vendor/webassets/filter/pyscss.py | 4 +- .../vendor/webassets/filter/rcssmin.py | 2 +- .../vendor/webassets/filter/replace.py | 2 +- .../vendor/webassets/filter/requirejs.py | 2 +- .../webassets/filter/rjsmin/__init__.py | 2 +- .../webassets/vendor/webassets/filter/sass.py | 4 +- .../vendor/webassets/filter/sass_ruby.py | 6 +- .../vendor/webassets/filter/slimit.py | 2 +- .../vendor/webassets/filter/slimmer.py | 2 +- .../vendor/webassets/filter/spritemapper.py | 4 +- .../vendor/webassets/filter/stylus.py | 2 +- .../vendor/webassets/filter/typescript.py | 4 +- .../vendor/webassets/filter/uglifyjs.py | 2 +- 
.../webassets/vendor/webassets/filter/yui.py | 2 +- .../webassets/vendor/webassets/loaders.py | 16 +- .../webassets/vendor/webassets/merge.py | 4 +- .../webassets/vendor/webassets/script.py | 255 ++++++++------ .../webassets/vendor/webassets/test.py | 6 +- .../webassets/vendor/webassets/updater.py | 14 +- .../webassets/vendor/webassets/utils.py | 4 +- .../webassets/vendor/webassets/version.py | 12 +- 53 files changed, 457 insertions(+), 337 deletions(-) diff --git a/pelican/plugins/webassets/vendor/webassets/bundle.py b/pelican/plugins/webassets/vendor/webassets/bundle.py index bbb23d8..03b6fa8 100644 --- a/pelican/plugins/webassets/vendor/webassets/bundle.py +++ b/pelican/plugins/webassets/vendor/webassets/bundle.py @@ -1,9 +1,9 @@ from contextlib import contextmanager import os from os import path -from webassets import six -from webassets.six.moves import map -from webassets.six.moves import zip +from pelican.plugins.webassets.vendor.webassets import six +from pelican.plugins.webassets.vendor.webassets.six.moves import map +from pelican.plugins.webassets.vendor.webassets.six.moves import zip from .filter import get_filter from .merge import (FileHunk, UrlHunk, FilterTool, merge, merge_filters, diff --git a/pelican/plugins/webassets/vendor/webassets/cache.py b/pelican/plugins/webassets/vendor/webassets/cache.py index 60fabb5..482f17b 100644 --- a/pelican/plugins/webassets/vendor/webassets/cache.py +++ b/pelican/plugins/webassets/vendor/webassets/cache.py @@ -18,10 +18,10 @@ import errno import tempfile import warnings -from webassets import six -from webassets.merge import BaseHunk -from webassets.filter import Filter, freezedicts -from webassets.utils import md5_constructor, pickle +from pelican.plugins.webassets.vendor.webassets import six +from pelican.plugins.webassets.vendor.webassets.merge import BaseHunk +from pelican.plugins.webassets.vendor.webassets.filter import Filter, freezedicts +from pelican.plugins.webassets.vendor.webassets.utils import md5_constructor, pickle import types diff --git a/pelican/plugins/webassets/vendor/webassets/env.py b/pelican/plugins/webassets/vendor/webassets/env.py index 90b8cd0..8598438 100644 --- a/pelican/plugins/webassets/vendor/webassets/env.py +++ b/pelican/plugins/webassets/vendor/webassets/env.py @@ -1,24 +1,25 @@ import os -from os import path from itertools import chain -from webassets import six -from webassets.six.moves import map -from webassets.utils import is_url +from os import path + +from pelican.plugins.webassets.vendor.webassets import six +from pelican.plugins.webassets.vendor.webassets.six.moves import map +from pelican.plugins.webassets.vendor.webassets.utils import is_url try: - import glob2 as glob from glob import has_magic + + import glob2 as glob except ImportError: import glob from glob import has_magic from .cache import get_cache -from .version import get_versioner, get_manifest from .updater import get_updater from .utils import urlparse +from .version import get_manifest, get_versioner - -__all__ = ('Environment', 'RegisterError') +__all__ = ("Environment", "RegisterError") class RegisterError(Exception): @@ -89,7 +90,7 @@ def _set_deprecated(self, key, value): def url_prefix_join(prefix, fragment): """Join url prefix with fragment.""" # Ensures urljoin will not cut the last part. - prefix += prefix[-1:] != '/' and '/' or '' + prefix += prefix[-1:] != "/" and "/" or "" return urlparse.urljoin(prefix, fragment) @@ -109,8 +110,9 @@ class Resolver(object): def glob(self, basedir, expr): """Evaluates a glob expression. 
- Yields a sorted list of absolute filenames. + Yields a sorted list of absolute filenames. """ + def glob_generator(basedir, expr): expr = path.join(basedir, expr) for filename in glob.iglob(expr): @@ -165,8 +167,7 @@ def search_load_path(self, ctx, item): result = self.glob(path, item) if result: return result - raise IOError("'%s' not found in load path: %s" % ( - item, ctx.load_path)) + raise IOError("'%s' not found in load path: %s" % (item, ctx.load_path)) def search_for_source(self, ctx, item): """Called by :meth:`resolve_source` after determining that @@ -198,22 +199,22 @@ def query_url_mapping(self, ctx, filepath): pass # Make sure paths are absolute, normalized, and sorted by length - mapping = list(map( - lambda p_u: (path.normpath(path.abspath(p_u[0])), p_u[1]), - mapping)) + mapping = list( + map(lambda p_u: (path.normpath(path.abspath(p_u[0])), p_u[1]), mapping) + ) mapping.sort(key=lambda i: len(i[0]), reverse=True) needle = path.normpath(filepath) for candidate, url in mapping: if needle.startswith(candidate): # Found it! - rel_path = needle[len(candidate) + 1:] + rel_path = needle[len(candidate) + 1 :] # If there are any subdirs in rel_path, ensure # they use HTML-style path separators, in case # the local OS (Windows!) has a different scheme rel_path = rel_path.replace(os.sep, "/") return url_prefix_join(url, rel_path) - raise ValueError('Cannot determine url for %s' % filepath) + raise ValueError("Cannot determine url for %s" % filepath) def resolve_source(self, ctx, item): """Given ``item`` from a Bundle's contents, this has to @@ -302,7 +303,6 @@ def resolve_output_to_url(self, ctx, target): class BundleRegistry(object): - def __init__(self): self._named_bundles = {} self._anon_bundles = [] @@ -321,7 +321,8 @@ def __len__(self): def __bool__(self): return True - __nonzero__ = __bool__ # For Python 2 + + __nonzero__ = __bool__ # For Python 2 def register(self, name, *args, **kwargs): """Register a :class:`Bundle` with the given ``name``. 
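The bulk of this patch is a mechanical rewrite of ``webassets.*`` imports to the vendored ``pelican.plugins.webassets.vendor.webassets.*`` namespace (plus formatter churn in a few modules). A rewrite of that shape can be reproduced with a small helper along the following lines; this sketch is not part of the patch, and the root path, regex, and alias caveat are assumptions:

import pathlib
import re

# Hypothetical helper (not part of this patch): rewrite absolute
# ``webassets`` imports to the vendored namespace, run from the repo root.
VENDOR_ROOT = pathlib.Path("pelican/plugins/webassets/vendor/webassets")
PREFIX = "pelican.plugins.webassets.vendor.webassets"

# Matches ``from webassets...`` / ``import webassets...`` at line start.
IMPORT_RE = re.compile(r"^(\s*)(from|import)(\s+)webassets\b", re.MULTILINE)

def rewrite_imports(path: pathlib.Path) -> None:
    source = path.read_text(encoding="utf-8")
    rewritten = IMPORT_RE.sub(r"\g<1>\g<2>\g<3>" + PREFIX, source)
    # Note: a bare ``import webassets`` line would additionally need an
    # ``as webassets`` alias, which this sketch does not add.
    if rewritten != source:
        path.write_text(rewritten, encoding="utf-8")

for pyfile in VENDOR_ROOT.rglob("*.py"):
    rewrite_imports(pyfile)
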
@@ -357,7 +358,7 @@ def register(self, name, *args, **kwargs): return if len(args) == 0: - raise TypeError('at least two arguments are required') + raise TypeError("at least two arguments are required") else: if len(args) == 1 and not kwargs and isinstance(args[0], Bundle): bundle = args[0] @@ -371,11 +372,13 @@ def register(self, name, *args, **kwargs): if self._named_bundles[name] == bundle: pass # ignore else: - raise RegisterError('Another bundle is already registered ' + - 'as "%s": %s' % (name, self._named_bundles[name])) + raise RegisterError( + "Another bundle is already registered " + + 'as "%s": %s' % (name, self._named_bundles[name]) + ) else: self._named_bundles[name] = bundle - bundle.env = self # take ownership + bundle.env = self # take ownership return bundle @@ -389,17 +392,19 @@ def add(self, *bundles): """ for bundle in bundles: self._anon_bundles.append(bundle) - bundle.env = self # take ownership + bundle.env = self # take ownership def decompose_bundle(self, name, bundle): from .bundle import Bundle if not bundle.output: - raise RegisterError('If `merge` is False, an output must be defined') + raise RegisterError("If `merge` is False, an output must be defined") for content in bundle.contents: if isinstance(content, Bundle): - raise RegisterError('Nested bundles are not allowed when `merge` is False') + raise RegisterError( + "Nested bundles are not allowed when `merge` is False" + ) bundle.env = self bundles = [] @@ -421,10 +426,9 @@ def register_decomposed(self, name, bundle, abspath): # The output might also contain `%(version)s` so I can't use # the C-style method of string formatting output = ( - bundle.output - .replace('%(name)s', filename) - .replace('%(path)s', filepath) - .replace('%(ext)s', fileext.strip('.')) + bundle.output.replace("%(name)s", filename) + .replace("%(path)s", filepath) + .replace("%(ext)s", fileext.strip(".")) ) new_bundle = Bundle( relpath, @@ -444,9 +448,19 @@ def register_decomposed(self, name, bundle, abspath): # their own namespacing, so they don't need to be prefixed. For example, a # filter setting might be CSSMIN_BIN. env_options = [ - 'directory', 'url', 'debug', 'cache', 'updater', 'auto_build', - 'url_expire', 'versions', 'manifest', 'load_path', 'url_mapping', - 'cache_file_mode' ] + "directory", + "url", + "debug", + "cache", + "updater", + "auto_build", + "url_expire", + "versions", + "manifest", + "load_path", + "url_mapping", + "cache_file_mode", +] class ConfigurationContext(object): @@ -471,11 +485,15 @@ def append_path(self, path, url=None): self.url_mapping[path] = url def _set_debug(self, debug): - self._storage['debug'] = debug + self._storage["debug"] = debug + def _get_debug(self): - return self._storage['debug'] - debug = property(_get_debug, _set_debug, doc= - """Enable/disable debug mode. Possible values are: + return self._storage["debug"] + + debug = property( + _get_debug, + _set_debug, + doc="""Enable/disable debug mode. Possible values are: ``False`` Production mode. Bundles will be merged and filters applied. @@ -484,14 +502,19 @@ def _get_debug(self): files. *"merge"* Merge the source files, but do not apply filters. - """) + """, + ) def _set_cache_file_mode(self, mode): - self._storage['cache_file_mode'] = mode + self._storage["cache_file_mode"] = mode + def _get_cache_file_mode(self): - return self._storage['cache_file_mode'] - cache_file_mode = property(_get_cache_file_mode, _set_cache_file_mode, doc= - """Controls the mode of files created in the cache. 
The default mode + return self._storage["cache_file_mode"] + + cache_file_mode = property( + _get_cache_file_mode, + _set_cache_file_mode, + doc="""Controls the mode of files created in the cache. The default mode is 0600. Follows standard unix mode. Possible values are any unix mode, e.g.: @@ -501,17 +524,22 @@ def _get_cache_file_mode(self): ``0666`` Enable world read+write bits - """) + """, + ) def _set_cache(self, enable): - self._storage['cache'] = enable + self._storage["cache"] = enable + def _get_cache(self): - cache = get_cache(self._storage['cache'], self) - if cache != self._storage['cache']: - self._storage['cache'] = cache + cache = get_cache(self._storage["cache"], self) + if cache != self._storage["cache"]: + self._storage["cache"] = cache return cache - cache = property(_get_cache, _set_cache, doc= - """Controls the behavior of the cache. The cache will speed up rebuilding + + cache = property( + _get_cache, + _set_cache, + doc="""Controls the behavior of the cache. The cache will speed up rebuilding of your bundles, by caching individual filter results. This can be particularly useful while developing, if your bundles would otherwise take a long time to rebuild. @@ -527,14 +555,19 @@ def _get_cache(self): *custom path* Use the given directory as the cache directory. - """) + """, + ) def _set_auto_build(self, value): - self._storage['auto_build'] = value + self._storage["auto_build"] = value + def _get_auto_build(self): - return self._storage['auto_build'] - auto_build = property(_get_auto_build, _set_auto_build, doc= - """Controls whether bundles should be automatically built, and + return self._storage["auto_build"] + + auto_build = property( + _get_auto_build, + _set_auto_build, + doc="""Controls whether bundles should be automatically built, and rebuilt, when required (if set to ``True``), or whether they must be built manually be the user, for example via a management command. @@ -546,17 +579,22 @@ def _get_auto_build(self): process takes very long, then you may want to disable this. By default automatic building is enabled. - """) + """, + ) def _set_manifest(self, manifest): - self._storage['manifest'] = manifest + self._storage["manifest"] = manifest + def _get_manifest(self): - manifest = get_manifest(self._storage['manifest'], env=self) - if manifest != self._storage['manifest']: - self._storage['manifest'] = manifest + manifest = get_manifest(self._storage["manifest"], env=self) + if manifest != self._storage["manifest"]: + self._storage["manifest"] = manifest return manifest - manifest = property(_get_manifest, _set_manifest, doc= - """A manifest persists information about the versions bundles + + manifest = property( + _get_manifest, + _set_manifest, + doc="""A manifest persists information about the versions bundles are at. The Manifest plays a role only if you insert the bundle version @@ -596,17 +634,22 @@ def _get_manifest(self): No manifest is used. Any custom manifest implementation. - """) + """, + ) def _set_versions(self, versions): - self._storage['versions'] = versions + self._storage["versions"] = versions + def _get_versions(self): - versions = get_versioner(self._storage['versions']) - if versions != self._storage['versions']: - self._storage['versions'] = versions + versions = get_versioner(self._storage["versions"]) + if versions != self._storage["versions"]: + self._storage["versions"] = versions return versions - versions = property(_get_versions, _set_versions, doc= - """Defines what should be used as a Bundle ``version``. 
+ + versions = property( + _get_versions, + _set_versions, + doc="""Defines what should be used as a Bundle ``version``. A bundle's version is what is appended to URLs when the ``url_expire`` option is enabled, and the version can be part @@ -629,17 +672,22 @@ def _get_versions(self): Any custom version implementation. - """) + """, + ) def set_updater(self, updater): - self._storage['updater'] = updater + self._storage["updater"] = updater + def get_updater(self): - updater = get_updater(self._storage['updater']) - if updater != self._storage['updater']: - self._storage['updater'] = updater + updater = get_updater(self._storage["updater"]) + if updater != self._storage["updater"]: + self._storage["updater"] = updater return updater - updater = property(get_updater, set_updater, doc= - """Controls how the ``auto_build`` option should determine + + updater = property( + get_updater, + set_updater, + doc="""Controls how the ``auto_build`` option should determine whether a bundle needs to be rebuilt. ``"timestamp"`` (default) @@ -650,14 +698,19 @@ def get_updater(self): Always rebuild bundles (avoid in production environments). Any custom version implementation. - """) + """, + ) def _set_url_expire(self, url_expire): - self._storage['url_expire'] = url_expire + self._storage["url_expire"] = url_expire + def _get_url_expire(self): - return self._storage['url_expire'] - url_expire = property(_get_url_expire, _set_url_expire, doc= - """If you send your assets to the client using a + return self._storage["url_expire"] + + url_expire = property( + _get_url_expire, + _set_url_expire, + doc="""If you send your assets to the client using a *far future expires* header (to minimize the 304 responses your server has to send), you need to make sure that assets will be reloaded by the browser when they change. @@ -672,46 +725,59 @@ def _get_url_expire(self): The default behavior (indicated by a ``None`` value) is to add an expiry querystring if the bundle does not use a version placeholder. - """) + """, + ) def _set_directory(self, directory): - self._storage['directory'] = directory + self._storage["directory"] = directory + def _get_directory(self): try: - return path.abspath(self._storage['directory']) + return path.abspath(self._storage["directory"]) except KeyError: - raise EnvironmentError( - 'The environment has no "directory" configured') - directory = property(_get_directory, _set_directory, doc= - """The base directory to which all paths will be relative to, + raise EnvironmentError('The environment has no "directory" configured') + + directory = property( + _get_directory, + _set_directory, + doc="""The base directory to which all paths will be relative to, unless :attr:`load_path` are given, in which case this will only serve as the output directory. In the url space, it is mapped to :attr:`urls`. - """) + """, + ) def _set_url(self, url): - self._storage['url'] = url + self._storage["url"] = url + def _get_url(self): try: - return self._storage['url'] + return self._storage["url"] except KeyError: - raise EnvironmentError( - 'The environment has no "url" configured') - url = property(_get_url, _set_url, doc= - """The url prefix used to construct urls for files in + raise EnvironmentError('The environment has no "url" configured') + + url = property( + _get_url, + _set_url, + doc="""The url prefix used to construct urls for files in :attr:`directory`. To define url spaces for other directories, see :attr:`url_mapping`. 
- """) + """, + ) def _set_load_path(self, load_path): - self._storage['load_path'] = load_path + self._storage["load_path"] = load_path + def _get_load_path(self): - return self._storage['load_path'] - load_path = property(_get_load_path, _set_load_path, doc= - """An list of directories that will be searched for source files. + return self._storage["load_path"] + + load_path = property( + _get_load_path, + _set_load_path, + doc="""An list of directories that will be searched for source files. If this is set, source files will only be looked for in these directories, and :attr:`directory` is used as a location for @@ -727,26 +793,34 @@ def _get_load_path(self): To modify this list, you should use :meth:`append_path`, since it makes it easy to add the corresponding url prefix to :attr:`url_mapping`. - """) + """, + ) def _set_url_mapping(self, url_mapping): - self._storage['url_mapping'] = url_mapping + self._storage["url_mapping"] = url_mapping + def _get_url_mapping(self): - return self._storage['url_mapping'] - url_mapping = property(_get_url_mapping, _set_url_mapping, doc= - """A dictionary of directory -> url prefix mappings that will + return self._storage["url_mapping"] + + url_mapping = property( + _get_url_mapping, + _set_url_mapping, + doc="""A dictionary of directory -> url prefix mappings that will be considered when generating urls, in addition to the pair of :attr:`directory` and :attr:`url`, which is always active. You should use :meth:`append_path` to add directories to the load path along with their respective url spaces, instead of modifying this setting directly. - """) + """, + ) def _set_resolver(self, resolver): - self._storage['resolver'] = resolver + self._storage["resolver"] = resolver + def _get_resolver(self): - return self._storage['resolver'] + return self._storage["resolver"] + resolver = property(_get_resolver, _set_resolver) @@ -769,47 +843,50 @@ def __init__(self, **config): # - enable url_expire, because we want to encourage the right thing # - default to hash versions, for the same reason: they're better # - manifest=cache because hash versions are slow - self.config.setdefault('debug', False) - self.config.setdefault('cache', True) - self.config.setdefault('url_expire', None) - self.config.setdefault('auto_build', True) - self.config.setdefault('manifest', 'cache') - self.config.setdefault('versions', 'hash') - self.config.setdefault('updater', 'timestamp') - self.config.setdefault('load_path', []) - self.config.setdefault('url_mapping', {}) - self.config.setdefault('resolver', self.resolver_class()) - self.config.setdefault('cache_file_mode', None) + self.config.setdefault("debug", False) + self.config.setdefault("cache", True) + self.config.setdefault("url_expire", None) + self.config.setdefault("auto_build", True) + self.config.setdefault("manifest", "cache") + self.config.setdefault("versions", "hash") + self.config.setdefault("updater", "timestamp") + self.config.setdefault("load_path", []) + self.config.setdefault("url_mapping", {}) + self.config.setdefault("resolver", self.resolver_class()) + self.config.setdefault("cache_file_mode", None) self.config.update(config) @property def config(self): - """Key-value configuration. Keys are case-insensitive. - """ + """Key-value configuration. Keys are case-insensitive.""" # This is a property so that user are not tempted to assign # a custom dictionary which won't uphold our caseless semantics. 
return self._config class DictConfigStorage(ConfigStorage): - """Using a lower-case dict for configuration values. - """ + """Using a lower-case dict for configuration values.""" + def __init__(self, *a, **kw): self._dict = {} ConfigStorage.__init__(self, *a, **kw) + def __contains__(self, key): return self._dict.__contains__(key.lower()) + def __getitem__(self, key): key = key.lower() value = self._get_deprecated(key) if not value is None: return value return self._dict.__getitem__(key) + def __setitem__(self, key, value): key = key.lower() if not self._set_deprecated(key, value): self._dict.__setitem__(key.lower(), value) + def __delitem__(self, key): self._dict.__delitem__(key.lower()) @@ -837,11 +914,11 @@ def parse_debug_value(value): if value is None: return value value = value.lower() - if value in ('true', '1'): + if value in ("true", "1"): return True - elif value in ('false', '0'): + elif value in ("false", "0"): return False - elif value in ('merge',): - return 'merge' + elif value in ("merge",): + return "merge" else: raise ValueError() diff --git a/pelican/plugins/webassets/vendor/webassets/ext/jinja2.py b/pelican/plugins/webassets/vendor/webassets/ext/jinja2.py index defeb9e..73a40bc 100644 --- a/pelican/plugins/webassets/vendor/webassets/ext/jinja2.py +++ b/pelican/plugins/webassets/vendor/webassets/ext/jinja2.py @@ -4,9 +4,9 @@ import jinja2 from jinja2.ext import Extension from jinja2 import nodes -from webassets import Bundle -from webassets.loaders import GlobLoader, LoaderError -from webassets.exceptions import ImminentDeprecationWarning +from pelican.plugins.webassets.vendor.webassets import Bundle +from pelican.plugins.webassets.vendor.webassets.loaders import GlobLoader, LoaderError +from pelican.plugins.webassets.vendor.webassets.exceptions import ImminentDeprecationWarning __all__ = ('assets', 'Jinja2Loader',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/__init__.py b/pelican/plugins/webassets/vendor/webassets/filter/__init__.py index 146f1a4..c6029db 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/__init__.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/__init__.py @@ -10,16 +10,16 @@ import shlex import tempfile import pkgutil -from webassets import six -from webassets.six.moves import map -from webassets.six.moves import zip +from pelican.plugins.webassets.vendor.webassets import six +from pelican.plugins.webassets.vendor.webassets.six.moves import map +from pelican.plugins.webassets.vendor.webassets.six.moves import zip try: frozenset except NameError: from sets import ImmutableSet as frozenset -from webassets.exceptions import FilterError -from webassets.importlib import import_module -from webassets.utils import hash_func +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError +from pelican.plugins.webassets.vendor.webassets.importlib import import_module +from pelican.plugins.webassets.vendor.webassets.utils import hash_func __all__ = ('Filter', 'CallableFilter', 'get_filter', 'register_filter', diff --git a/pelican/plugins/webassets/vendor/webassets/filter/autoprefixer.py b/pelican/plugins/webassets/vendor/webassets/filter/autoprefixer.py index a406c30..fda3466 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/autoprefixer.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/autoprefixer.py @@ -1,7 +1,7 @@ from __future__ import with_statement -from webassets.filter import ExternalTool -from webassets.utils import working_directory +from 
pelican.plugins.webassets.vendor.webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.utils import working_directory class AutoprefixerFilter(ExternalTool): diff --git a/pelican/plugins/webassets/vendor/webassets/filter/babel.py b/pelican/plugins/webassets/vendor/webassets/filter/babel.py index 9b36e13..6d111dc 100755 --- a/pelican/plugins/webassets/vendor/webassets/filter/babel.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/babel.py @@ -1,4 +1,4 @@ -from webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool class Babel(ExternalTool): diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cleancss.py b/pelican/plugins/webassets/vendor/webassets/filter/cleancss.py index f55516c..a4d856b 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/cleancss.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/cleancss.py @@ -1,7 +1,7 @@ import os from subprocess import PIPE, Popen -from webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool __all__ = ('CleanCSS',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/clevercss.py b/pelican/plugins/webassets/vendor/webassets/filter/clevercss.py index 37ef78e..ac6f6e9 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/clevercss.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/clevercss.py @@ -1,5 +1,5 @@ from __future__ import absolute_import -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('CleverCSS',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/closure.py b/pelican/plugins/webassets/vendor/webassets/filter/closure.py index 76435e2..d60ddfc 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/closure.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/closure.py @@ -31,7 +31,7 @@ """ from __future__ import absolute_import -from webassets.filter import JavaTool +from pelican.plugins.webassets.vendor.webassets.filter import JavaTool __all__ = ('ClosureJS',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/closure_stylesheets.py b/pelican/plugins/webassets/vendor/webassets/filter/closure_stylesheets.py index 1b8a4e1..0908825 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/closure_stylesheets.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/closure_stylesheets.py @@ -11,7 +11,7 @@ assume that ``java`` is on the system path. 
""" -from webassets.filter import JavaTool +from pelican.plugins.webassets.vendor.webassets.filter import JavaTool __all__ = ['ClosureStylesheetsCompiler', 'ClosureStylesheetsMinifier'] diff --git a/pelican/plugins/webassets/vendor/webassets/filter/closure_templates.py b/pelican/plugins/webassets/vendor/webassets/filter/closure_templates.py index b2a15fa..2e1aaab 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/closure_templates.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/closure_templates.py @@ -30,8 +30,8 @@ import os import tempfile -from webassets.exceptions import FilterError -from webassets.filter.jst import JSTemplateFilter +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError +from pelican.plugins.webassets.vendor.webassets.filter.jst import JSTemplateFilter __all__ = ('ClosureTemplateFilter',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/coffeescript.py b/pelican/plugins/webassets/vendor/webassets/filter/coffeescript.py index 9ff075d..d7e26f0 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/coffeescript.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/coffeescript.py @@ -1,8 +1,8 @@ from __future__ import print_function import os, subprocess -from webassets.filter import Filter -from webassets.exceptions import FilterError, ImminentDeprecationWarning +from pelican.plugins.webassets.vendor.webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError, ImminentDeprecationWarning __all__ = ('CoffeeScript',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/compass.py b/pelican/plugins/webassets/vendor/webassets/filter/compass.py index 256544f..e08e9c8 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/compass.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/compass.py @@ -32,10 +32,10 @@ import shutil import subprocess from io import open -from webassets import six +from pelican.plugins.webassets.vendor.webassets import six -from webassets.exceptions import FilterError -from webassets.filter import Filter, option +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError +from pelican.plugins.webassets.vendor.webassets.filter import Filter, option __all__ = ('Compass',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssmin.py b/pelican/plugins/webassets/vendor/webassets/filter/cssmin.py index bf0c4a2..42d4442 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/cssmin.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssmin.py @@ -1,5 +1,5 @@ from __future__ import absolute_import -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('CSSMin',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssprefixer.py b/pelican/plugins/webassets/vendor/webassets/filter/cssprefixer.py index 59cffbb..24774a6 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/cssprefixer.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssprefixer.py @@ -1,5 +1,5 @@ from __future__ import absolute_import -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('CSSPrefixer',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/__init__.py b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/__init__.py index a3985e0..81c18b6 100644 --- 
a/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/__init__.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/__init__.py @@ -1,7 +1,7 @@ import os from os.path import join -from webassets.utils import common_path_prefix -from webassets.utils import urlparse +from pelican.plugins.webassets.vendor.webassets.utils import common_path_prefix +from pelican.plugins.webassets.vendor.webassets.utils import urlparse from . import urlpath try: from collections import OrderedDict diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/base.py b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/base.py index db358a7..372f8a9 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/base.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssrewrite/base.py @@ -1,8 +1,8 @@ import os import re from os.path import join, normpath -from webassets.filter import Filter -from webassets.utils import common_path_prefix +from pelican.plugins.webassets.vendor.webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.utils import common_path_prefix __all__ = () diff --git a/pelican/plugins/webassets/vendor/webassets/filter/cssutils.py b/pelican/plugins/webassets/vendor/webassets/filter/cssutils.py index 2b8f4a1..dd4a801 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/cssutils.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/cssutils.py @@ -2,7 +2,7 @@ import logging import logging.handlers -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('CSSUtils',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/datauri.py b/pelican/plugins/webassets/vendor/webassets/filter/datauri.py index 339fed6..c93f465 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/datauri.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/datauri.py @@ -1,9 +1,9 @@ from base64 import b64encode import mimetypes import os -from webassets.utils import urlparse +from pelican.plugins.webassets.vendor.webassets.utils import urlparse -from webassets.filter.cssrewrite.base import CSSUrlRewriter +from pelican.plugins.webassets.vendor.webassets.filter.cssrewrite.base import CSSUrlRewriter __all__ = ('CSSDataUri',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/dust.py b/pelican/plugins/webassets/vendor/webassets/filter/dust.py index 4623455..bf237c8 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/dust.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/dust.py @@ -3,7 +3,7 @@ """ -from webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool __all__ = ('DustJS',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/handlebars.py b/pelican/plugins/webassets/vendor/webassets/filter/handlebars.py index 173a92f..53e6eba 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/handlebars.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/handlebars.py @@ -2,8 +2,8 @@ import os from os import path -from webassets.exceptions import FilterError -from webassets.filter.jst import JSTemplateFilter +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError +from pelican.plugins.webassets.vendor.webassets.filter.jst import JSTemplateFilter __all__ = ('Handlebars',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jade.py b/pelican/plugins/webassets/vendor/webassets/filter/jade.py index 
16bab7d..95652ff 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/jade.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/jade.py @@ -2,8 +2,8 @@ from __future__ import print_function import os, subprocess -from webassets.filter import Filter, register_filter -from webassets.exceptions import FilterError +from pelican.plugins.webassets.vendor.webassets.filter import Filter, register_filter +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError class Jade(Filter): diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jinja2.py b/pelican/plugins/webassets/vendor/webassets/filter/jinja2.py index 50a295e..282c76e 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/jinja2.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/jinja2.py @@ -1,5 +1,5 @@ from __future__ import absolute_import -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('Jinja2',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jsmin.py b/pelican/plugins/webassets/vendor/webassets/filter/jsmin.py index ff21d7f..b75523a 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/jsmin.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/jsmin.py @@ -1,7 +1,7 @@ from __future__ import absolute_import import warnings -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('JSMin',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jspacker/__init__.py b/pelican/plugins/webassets/vendor/webassets/filter/jspacker/__init__.py index 9fc56b8..19cd51c 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/jspacker/__init__.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/jspacker/__init__.py @@ -1,5 +1,5 @@ from .jspacker import JavaScriptPacker -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('JSPacker',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/jst.py b/pelican/plugins/webassets/vendor/webassets/filter/jst.py index 101fedc..e6e56de 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/jst.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/jst.py @@ -4,8 +4,8 @@ import json except ImportError: import simplejson as json -from webassets.filter import Filter -from webassets.utils import common_path_prefix +from pelican.plugins.webassets.vendor.webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.utils import common_path_prefix __all__ = ('JST',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/less.py b/pelican/plugins/webassets/vendor/webassets/filter/less.py index c228e3e..4a9e550 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/less.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/less.py @@ -2,8 +2,8 @@ import os -from webassets.filter import ExternalTool -from webassets.utils import working_directory +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.utils import working_directory class Less(ExternalTool): diff --git a/pelican/plugins/webassets/vendor/webassets/filter/less_ruby.py b/pelican/plugins/webassets/vendor/webassets/filter/less_ruby.py index b85e8cc..bf45f12 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/less_ruby.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/less_ruby.py @@ -2,8 +2,8 @@ import os, 
subprocess import tempfile -from webassets.filter import Filter -from webassets.exceptions import FilterError +from pelican.plugins.webassets.vendor.webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError __all__ = ('Less',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/libsass.py b/pelican/plugins/webassets/vendor/webassets/filter/libsass.py index 6867be3..13a6c4a 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/libsass.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/libsass.py @@ -9,7 +9,7 @@ from __future__ import print_function from __future__ import absolute_import -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('LibSass',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/node_sass.py b/pelican/plugins/webassets/vendor/webassets/filter/node_sass.py index cbb38a0..ac0d4d5 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/node_sass.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/node_sass.py @@ -1,7 +1,7 @@ import os import subprocess -from webassets.exceptions import FilterError +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError from .sass import Sass diff --git a/pelican/plugins/webassets/vendor/webassets/filter/postcss.py b/pelican/plugins/webassets/vendor/webassets/filter/postcss.py index 479402b..9b040aa 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/postcss.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/postcss.py @@ -1,7 +1,7 @@ from __future__ import with_statement -from webassets.filter import ExternalTool -from webassets.utils import working_directory +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.utils import working_directory class PostCSS(ExternalTool): diff --git a/pelican/plugins/webassets/vendor/webassets/filter/pyscss.py b/pelican/plugins/webassets/vendor/webassets/filter/pyscss.py index cfa6637..b2abfab 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/pyscss.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/pyscss.py @@ -1,7 +1,7 @@ import os -from webassets.filter import Filter -from webassets.utils import working_directory +from pelican.plugins.webassets.vendor.webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.utils import working_directory __all__ = ('PyScss',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/rcssmin.py b/pelican/plugins/webassets/vendor/webassets/filter/rcssmin.py index b76325c..cb6aa89 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/rcssmin.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/rcssmin.py @@ -1,5 +1,5 @@ from __future__ import absolute_import -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('RCSSMin',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/replace.py b/pelican/plugins/webassets/vendor/webassets/filter/replace.py index 829face..ee1444b 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/replace.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/replace.py @@ -1,5 +1,5 @@ import re -from webassets.filter import ( +from pelican.plugins.webassets.vendor.webassets.filter import ( Filter, register_filter ) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/requirejs.py 
b/pelican/plugins/webassets/vendor/webassets/filter/requirejs.py index 536a0e3..3d9a142 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/requirejs.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/requirejs.py @@ -3,7 +3,7 @@ import shlex from os import path, getcwd -from webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool class RequireJSFilter(ExternalTool): diff --git a/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/__init__.py b/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/__init__.py index ff32865..cab89bf 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/__init__.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/rjsmin/__init__.py @@ -5,7 +5,7 @@ from . import rjsmin -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('RJSMin',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/sass.py b/pelican/plugins/webassets/vendor/webassets/filter/sass.py index 02854d6..1220b55 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/sass.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/sass.py @@ -2,7 +2,7 @@ import os -from webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool __all__ = ('Sass', 'SCSS') @@ -28,7 +28,7 @@ class Sass(ExternalTool): To use Sass as an output filter:: - from webassets.filter import get_filter + from pelican.plugins.webassets.vendor.webassets.filter import get_filter sass = get_filter('sass', as_output=True) Bundle(...., filters=(sass,)) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/sass_ruby.py b/pelican/plugins/webassets/vendor/webassets/filter/sass_ruby.py index 63b07b0..e08ec1b 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/sass_ruby.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/sass_ruby.py @@ -1,8 +1,8 @@ from __future__ import print_function import os, subprocess -from webassets.filter import ExternalTool -from webassets.cache import FilesystemCache +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.cache import FilesystemCache __all__ = ('RubySass', 'RubySCSS') @@ -31,7 +31,7 @@ class RubySass(ExternalTool): To use Sass as an output filter:: - from webassets.filter import get_filter + from pelican.plugins.webassets.vendor.webassets.filter import get_filter sass = get_filter('sass', as_output=True) Bundle(...., filters=(sass,)) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/slimit.py b/pelican/plugins/webassets/vendor/webassets/filter/slimit.py index 0b8fae0..c10fad4 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/slimit.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/slimit.py @@ -1,5 +1,5 @@ from __future__ import absolute_import -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter __all__ = ('Slimit',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/slimmer.py b/pelican/plugins/webassets/vendor/webassets/filter/slimmer.py index 7e98bfd..d9f60b7 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/slimmer.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/slimmer.py @@ -1,6 +1,6 @@ from __future__ import absolute_import -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter 
__all__ = ('CSSSlimmer',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/spritemapper.py b/pelican/plugins/webassets/vendor/webassets/filter/spritemapper.py index 8ee8465..e4c00eb 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/spritemapper.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/spritemapper.py @@ -1,8 +1,8 @@ from __future__ import print_function from __future__ import absolute_import -from webassets.six import StringIO +from pelican.plugins.webassets.vendor.webassets.six import StringIO from contextlib import contextmanager -from webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.filter import Filter try: from spritecss.main import CSSFile diff --git a/pelican/plugins/webassets/vendor/webassets/filter/stylus.py b/pelican/plugins/webassets/vendor/webassets/filter/stylus.py index 63d9205..558c22c 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/stylus.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/stylus.py @@ -1,5 +1,5 @@ import os -from webassets.filter import ExternalTool, option +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool, option __all__ = ('Stylus',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/typescript.py b/pelican/plugins/webassets/vendor/webassets/filter/typescript.py index bed10ae..a136826 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/typescript.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/typescript.py @@ -3,8 +3,8 @@ import tempfile from io import open # Give 2 and 3 use same newline behaviour. -from webassets.filter import Filter -from webassets.exceptions import FilterError +from pelican.plugins.webassets.vendor.webassets.filter import Filter +from pelican.plugins.webassets.vendor.webassets.exceptions import FilterError __all__ = ('TypeScript',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/uglifyjs.py b/pelican/plugins/webassets/vendor/webassets/filter/uglifyjs.py index 7e35255..7193c2e 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/uglifyjs.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/uglifyjs.py @@ -1,4 +1,4 @@ -from webassets.filter import ExternalTool +from pelican.plugins.webassets.vendor.webassets.filter import ExternalTool __all__ = ('UglifyJS',) diff --git a/pelican/plugins/webassets/vendor/webassets/filter/yui.py b/pelican/plugins/webassets/vendor/webassets/filter/yui.py index 0c67de4..0022abf 100644 --- a/pelican/plugins/webassets/vendor/webassets/filter/yui.py +++ b/pelican/plugins/webassets/vendor/webassets/filter/yui.py @@ -17,7 +17,7 @@ on the system path. 
""" -from webassets.filter import JavaTool +from pelican.plugins.webassets.vendor.webassets.filter import JavaTool __all__ = ('YUIJS', 'YUICSS',) diff --git a/pelican/plugins/webassets/vendor/webassets/loaders.py b/pelican/plugins/webassets/vendor/webassets/loaders.py index 500ab57..b06dc1b 100644 --- a/pelican/plugins/webassets/vendor/webassets/loaders.py +++ b/pelican/plugins/webassets/vendor/webassets/loaders.py @@ -9,18 +9,18 @@ import glob, fnmatch import inspect import types -from webassets import six +from pelican.plugins.webassets.vendor.webassets import six try: import yaml except ImportError: pass -from webassets import six -from webassets import Environment -from webassets.bundle import Bundle -from webassets.exceptions import EnvironmentError -from webassets.filter import register_filter -from webassets.importlib import import_module +from pelican.plugins.webassets.vendor.webassets import six +from pelican.plugins.webassets.vendor.webassets import Environment +from pelican.plugins.webassets.vendor.webassets.bundle import Bundle +from pelican.plugins.webassets.vendor.webassets.exceptions import EnvironmentError +from pelican.plugins.webassets.vendor.webassets.filter import register_filter +from pelican.plugins.webassets.vendor.webassets.importlib import import_module __all__ = ('Loader', 'LoaderError', 'PythonLoader', 'YAMLLoader', @@ -190,7 +190,7 @@ def load_environment(self): Sample usage:: - from webassets.loaders import YAMLLoader + from pelican.plugins.webassets.vendor.webassets.loaders import YAMLLoader loader = YAMLLoader('asset.yml') env = loader.load_environment() diff --git a/pelican/plugins/webassets/vendor/webassets/merge.py b/pelican/plugins/webassets/vendor/webassets/merge.py index 3d70bff..9c19271 100644 --- a/pelican/plugins/webassets/vendor/webassets/merge.py +++ b/pelican/plugins/webassets/vendor/webassets/merge.py @@ -11,8 +11,8 @@ from urllib2 import HTTPError import logging from io import open -from webassets import six -from webassets.six.moves import filter +from pelican.plugins.webassets.vendor.webassets import six +from pelican.plugins.webassets.vendor.webassets.six.moves import filter from .utils import cmp_debug_levels, StringIO, hash_func diff --git a/pelican/plugins/webassets/vendor/webassets/script.py b/pelican/plugins/webassets/vendor/webassets/script.py index 102a421..5a986ce 100644 --- a/pelican/plugins/webassets/vendor/webassets/script.py +++ b/pelican/plugins/webassets/vendor/webassets/script.py @@ -1,25 +1,26 @@ from __future__ import print_function + +import logging +import os import shutil -import os, sys +import sys import time -import logging - -from webassets.loaders import PythonLoader, YAMLLoader -from webassets.bundle import get_all_bundle_files -from webassets.exceptions import BuildError -from webassets.updater import TimestampUpdater -from webassets.merge import MemoryHunk -from webassets.version import get_manifest -from webassets.cache import FilesystemCache -from webassets.utils import set, StringIO +from pelican.plugins.webassets.vendor.webassets.bundle import get_all_bundle_files +from pelican.plugins.webassets.vendor.webassets.cache import FilesystemCache +from pelican.plugins.webassets.vendor.webassets.exceptions import BuildError +from pelican.plugins.webassets.vendor.webassets.loaders import PythonLoader, YAMLLoader +from pelican.plugins.webassets.vendor.webassets.merge import MemoryHunk +from pelican.plugins.webassets.vendor.webassets.updater import TimestampUpdater +from pelican.plugins.webassets.vendor.webassets.utils 
import StringIO, set +from pelican.plugins.webassets.vendor.webassets.version import get_manifest -__all__ = ('CommandError', 'CommandLineEnvironment', 'main') +__all__ = ("CommandError", "CommandLineEnvironment", "main") # logging has WARNING as default level, for the CLI we want INFO. Set this # as early as possible, so that user customizations will not be overwritten. -logging.getLogger('webassets.script').setLevel(logging.INFO) +logging.getLogger("webassets.script").setLevel(logging.INFO) class CommandError(Exception): @@ -45,9 +46,15 @@ def __call__(self, *args, **kwargs): class BuildCommand(Command): - - def __call__(self, bundles=None, output=None, directory=None, no_cache=None, - manifest=None, production=None): + def __call__( + self, + bundles=None, + output=None, + directory=None, + no_cache=None, + manifest=None, + production=None, + ): """Build assets. ``bundles`` @@ -84,12 +91,15 @@ def __call__(self, bundles=None, output=None, directory=None, no_cache=None, # Validate arguments if bundles and output: raise CommandError( - 'When specifying explicit output filenames you must ' - 'do so for all bundles you want to build.') + "When specifying explicit output filenames you must " + "do so for all bundles you want to build." + ) if directory and output: - raise CommandError('A custom output directory cannot be ' - 'combined with explicit output filenames ' - 'for individual bundles.') + raise CommandError( + "A custom output directory cannot be " + "combined with explicit output filenames " + "for individual bundles." + ) if production: # TODO: Reset again (refactor commands to be classes) @@ -104,7 +114,8 @@ def __call__(self, bundles=None, output=None, directory=None, no_cache=None, # abspath() is important, or this will be considered # relative to Environment.directory. "file:%s" % os.path.abspath(manifest), - env=self.environment) + env=self.environment, + ) self.environment.manifest = manifest # Use output as a dict. @@ -115,14 +126,16 @@ def __call__(self, bundles=None, output=None, directory=None, no_cache=None, bundle_names = bundles if bundles else (output.keys() if output else []) for name in bundle_names: if not name in self.environment: - raise CommandError( - 'I do not know a bundle name named "%s".' % name) + raise CommandError('I do not know a bundle name named "%s".' % name) # Make a list of bundles to build, and the filename to write to. if bundle_names: # TODO: It's not ok to use an internal property here. - bundles = [(n,b) for n, b in self.environment._named_bundles.items() - if n in bundle_names] + bundles = [ + (n, b) + for n, b in self.environment._named_bundles.items() + if n in bundle_names + ] else: # Includes unnamed bundles as well. bundles = [(None, b) for b in self.environment] @@ -130,8 +143,8 @@ def __call__(self, bundles=None, output=None, directory=None, no_cache=None, # Determine common prefix for use with ``directory`` option. if directory: prefix = os.path.commonprefix( - [os.path.normpath(b.resolve_output()) - for _, b in bundles if b.output]) + [os.path.normpath(b.resolve_output()) for _, b in bundles if b.output] + ) # dirname() gives the right value for a single file. prefix = os.path.dirname(prefix) @@ -142,18 +155,24 @@ def __call__(self, bundles=None, output=None, directory=None, no_cache=None, # otherwise occur. if bundle.is_container and directory: raise CommandError( - 'A custom output directory cannot currently be ' - 'used with container bundles.') + "A custom output directory cannot currently be " + "used with container bundles." 
+ ) # Determine which filename to use, if not the default. overwrite_filename = None if output: overwrite_filename = output[name] elif directory: - offset = os.path.normpath( - bundle.resolve_output())[len(prefix)+1:] + offset = os.path.normpath(bundle.resolve_output())[len(prefix) + 1 :] overwrite_filename = os.path.join(directory, offset) - to_build.append((bundle, overwrite_filename, name,)) + to_build.append( + ( + bundle, + overwrite_filename, + name, + ) + ) # Build. built = [] @@ -161,8 +180,10 @@ def __call__(self, bundles=None, output=None, directory=None, no_cache=None, if name: # A name is not necessary available of the bundle was # registered without one. - self.log.info("Building bundle: %s (to %s)" % ( - name, overwrite_filename or bundle.output)) + self.log.info( + "Building bundle: %s (to %s)" + % (name, overwrite_filename or bundle.output) + ) else: self.log.info("Building bundle: %s" % bundle.output) @@ -179,8 +200,7 @@ def __call__(self, bundles=None, output=None, directory=None, no_cache=None, # anyway. output = StringIO() with bundle.bind(self.environment): - bundle.build(force=True, output=output, - disable_cache=no_cache) + bundle.build(force=True, output=output, disable_cache=no_cache) if directory: # Only auto-create directories in this mode. output_dir = os.path.dirname(overwrite_filename) @@ -191,13 +211,12 @@ def __call__(self, bundles=None, output=None, directory=None, no_cache=None, except BuildError as e: self.log.error("Failed, error was: %s" % e) if len(built): - self.event_handlers['post_build']() + self.event_handlers["post_build"]() if len(built) != len(to_build): return 2 class WatchCommand(Command): - def __call__(self, loop=None): """Watch assets for changes. @@ -213,18 +232,17 @@ def __call__(self, loop=None): # Before starting to watch for changes, also recognize changes # made while we did not run, and apply those immediately. for bundle in self.environment: - print('Bringing up to date: %s' % bundle.output) + print("Bringing up to date: %s" % bundle.output) bundle.build(force=False) - self.log.info("Watching %d bundles for changes..." % - len(self.environment)) + self.log.info("Watching %d bundles for changes..." % len(self.environment)) while True: changed_bundles = self.check_for_changes(mtimes) built = [] for bundle in changed_bundles: - print("Building bundle: %s ..." % bundle.output, end=' ') + print("Building bundle: %s ..." % bundle.output, end=" ") sys.stdout.flush() try: bundle.build(force=True) @@ -236,7 +254,7 @@ def __call__(self, loop=None): print("done") if len(built): - self.event_handlers['post_build']() + self.event_handlers["post_build"]() do_end = loop() if loop else time.sleep(0.1) if do_end: @@ -267,6 +285,7 @@ def check_for_changes(self, mtimes): # EnvironmentError is what the hooks is allowed to # raise for a temporary problem, like an invalid config import traceback + traceback.print_exc() # Don't update anything, wait for another change bundles_to_update = set() @@ -288,11 +307,9 @@ def yield_files_to_watch(self): class CleanCommand(Command): - def __call__(self): - """Delete generated assets. - """ - self.log.info('Cleaning generated assets...') + """Delete generated assets.""" + self.log.info("Cleaning generated assets...") for bundle in self.environment: if not bundle.output: continue @@ -305,7 +322,6 @@ def __call__(self): class CheckCommand(Command): - def __call__(self): """Check to see if assets need to be rebuilt. 
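For reference, the commands defined in this file can also be driven programmatically through the vendored module path shown in these hunks; a minimal sketch, assuming an ``assets.yml`` bundle definition (the filename is an assumption):

import logging

from pelican.plugins.webassets.vendor.webassets.loaders import YAMLLoader
from pelican.plugins.webassets.vendor.webassets.script import CommandLineEnvironment

# Load an Environment from a YAML definition and run the same commands
# the CLI exposes.
env = YAMLLoader("assets.yml").load_environment()
cli = CommandLineEnvironment(env, logging.getLogger("webassets.script"))
cli.invoke("build", {})   # build all registered bundles
cli.invoke("check", {})   # note: calls sys.exit(-1) if a rebuild is needed
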
@@ -316,12 +332,12 @@ def __call__(self): needsupdate = False updater = self.environment.updater if not updater: - self.log.debug('no updater configured, using TimestampUpdater') + self.log.debug("no updater configured, using TimestampUpdater") updater = TimestampUpdater() for bundle in self.environment: - self.log.info('Checking asset: %s', bundle.output) + self.log.info("Checking asset: %s", bundle.output) if updater.needs_rebuild(bundle, self.environment): - self.log.info(' needs update') + self.log.info(" needs update") needsupdate = True if needsupdate: sys.exit(-1) @@ -339,7 +355,7 @@ def __init__(self, env, log, post_build=None, commands=None): self.log = log self.event_handlers = dict(post_build=lambda: True) if callable(post_build): - self.event_handlers['post_build'] = post_build + self.event_handlers["post_build"] = post_build # Instantiate each command command_def = self.DefaultCommands.copy() @@ -350,8 +366,7 @@ def __init__(self, env, log, post_build=None, commands=None): continue if not isinstance(construct, (list, tuple)): construct = [construct, (), {}] - self.commands[name] = construct[0]( - self, *construct[1], **construct[2]) + self.commands[name] = construct[0](self, *construct[1], **construct[2]) def __getattr__(self, item): # Allow method-like access to commands. @@ -368,16 +383,16 @@ def invoke(self, command, args): try: function = self.commands[command] except KeyError as e: - raise CommandError('unknown command: %s' % e) + raise CommandError("unknown command: %s" % e) else: return function(**args) # List of commands installed DefaultCommands = { - 'build': BuildCommand, - 'watch': WatchCommand, - 'clean': CleanCommand, - 'check': CheckCommand + "build": BuildCommand, + "watch": WatchCommand, + "clean": CleanCommand, + "check": CheckCommand, } @@ -403,7 +418,7 @@ def yield_files_to_watch(self): for result in WatchCommand.yield_files_to_watch(self): yield result # If the config changes, rebuild all bundles - if getattr(self.ns, 'config', None): + if getattr(self.ns, "config", None): yield self.ns.config, self.reload_config def reload_config(self): @@ -413,14 +428,14 @@ def reload_config(self): raise EnvironmentError(e) return True - def __init__(self, env=None, log=None, prog=None, no_global_options=False): try: import argparse except ImportError: raise RuntimeError( - 'The webassets command line now requires the ' - '"argparse" library on Python versions <= 2.6.') + "The webassets command line now requires the " + '"argparse" library on Python versions <= 2.6.' + ) else: self.argparse = argparse self.env = env @@ -429,66 +444,90 @@ def __init__(self, env=None, log=None, prog=None, no_global_options=False): def _construct_parser(self, prog=None, no_global_options=False): self.parser = parser = self.argparse.ArgumentParser( - description="Manage assets.", - prog=prog) + description="Manage assets.", prog=prog + ) if not no_global_options: # Start with the base arguments that are valid for any command. # XXX: Add those to the subparser? 
- parser.add_argument("-v", dest="verbose", action="store_true", - help="be verbose") - parser.add_argument("-q", action="store_true", dest="quiet", - help="be quiet") + parser.add_argument( + "-v", dest="verbose", action="store_true", help="be verbose" + ) + parser.add_argument( + "-q", action="store_true", dest="quiet", help="be quiet" + ) if self.env is None: loadenv = parser.add_mutually_exclusive_group() - loadenv.add_argument("-c", "--config", dest="config", - help="read environment from a YAML file") - loadenv.add_argument("-m", "--module", dest="module", - help="read environment from a Python module") + loadenv.add_argument( + "-c", + "--config", + dest="config", + help="read environment from a YAML file", + ) + loadenv.add_argument( + "-m", + "--module", + dest="module", + help="read environment from a Python module", + ) # Add subparsers. - subparsers = parser.add_subparsers(dest='command') + subparsers = parser.add_subparsers(dest="command") for command in CommandLineEnvironment.DefaultCommands.keys(): command_parser = subparsers.add_parser(command) - maker = getattr(self, 'make_%s_parser' % command, False) + maker = getattr(self, "make_%s_parser" % command, False) if maker: maker(command_parser) @staticmethod def make_build_parser(parser): parser.add_argument( - 'bundles', nargs='*', metavar='BUNDLE', - help='Optional bundle names to process. If none are ' - 'specified, then all known bundles will be built.') + "bundles", + nargs="*", + metavar="BUNDLE", + help="Optional bundle names to process. If none are " + "specified, then all known bundles will be built.", + ) parser.add_argument( - '--output', '-o', nargs=2, action='append', - metavar=('BUNDLE', 'FILE'), - help='Build the given bundle, and use a custom output ' - 'file. Can be given multiple times.') + "--output", + "-o", + nargs=2, + action="append", + metavar=("BUNDLE", "FILE"), + help="Build the given bundle, and use a custom output " + "file. Can be given multiple times.", + ) parser.add_argument( - '--directory', '-d', - help='Write built files to this directory, using the ' - 'basename defined by the bundle. Will offset ' - 'the original bundle output paths on their common ' - 'prefix. Cannot be used with --output.') + "--directory", + "-d", + help="Write built files to this directory, using the " + "basename defined by the bundle. Will offset " + "the original bundle output paths on their common " + "prefix. Cannot be used with --output.", + ) parser.add_argument( - '--no-cache', action='store_true', - help='Do not use a cache that might be configured.') + "--no-cache", + action="store_true", + help="Do not use a cache that might be configured.", + ) parser.add_argument( - '--manifest', - help='Write a manifest to the given file. Also supports ' - 'the id:arg format, if you want to use a different ' - 'manifest implementation.') + "--manifest", + help="Write a manifest to the given file. Also supports " + "the id:arg format, if you want to use a different " + "manifest implementation.", + ) parser.add_argument( - '--production', action='store_true', - help='Forcably turn off debug mode for the build. This ' - 'only has an effect if debug is set to "merge".') + "--production", + action="store_true", + help="Forcably turn off debug mode for the build. 
This " + 'only has an effect if debug is set to "merge".', + ) def _setup_logging(self, ns): if self.log: log = self.log else: - log = logging.getLogger('webassets.script') + log = logging.getLogger("webassets.script") if not log.handlers: # In theory, this could run multiple times (e.g. tests) handler = logging.StreamHandler() @@ -497,8 +536,11 @@ def _setup_logging(self, ns): # better than the logger level, since this is "our" handler, # we create it, for the purposes of having a default output. # The logger itself the user may be modifying. - handler.setLevel(logging.DEBUG if ns.verbose else ( - logging.WARNING if ns.quiet else logging.INFO)) + handler.setLevel( + logging.DEBUG + if ns.verbose + else (logging.WARNING if ns.quiet else logging.INFO) + ) return log def _setup_assets_env(self, ns, log): @@ -512,9 +554,11 @@ def _setup_assets_env(self, ns, log): return env def _setup_cmd_env(self, assets_env, log, ns): - return CommandLineEnvironment(assets_env, log, commands={ - 'watch': (GenericArgparseImplementation.WatchCommand, (ns,), {}) - }) + return CommandLineEnvironment( + assets_env, + log, + commands={"watch": (GenericArgparseImplementation.WatchCommand, (ns,), {})}, + ) def _prepare_command_args(self, ns): # Prepare a dict of arguments cleaned of values that are not @@ -530,8 +574,7 @@ def run_with_ns(self, ns): log = self._setup_logging(ns) env = self._setup_assets_env(ns, log) if env is None: - raise CommandError( - "Error: No environment given or found. Maybe use -m?") + raise CommandError("Error: No environment given or found. Maybe use -m?") cmd = self._setup_cmd_env(env, log, ns) # Run the selected command @@ -578,5 +621,5 @@ def run(): sys.exit(main(sys.argv[1:]) or 0) -if __name__ == '__main__': +if __name__ == "__main__": run() diff --git a/pelican/plugins/webassets/vendor/webassets/test.py b/pelican/plugins/webassets/vendor/webassets/test.py index 359aef5..aaef75a 100644 --- a/pelican/plugins/webassets/vendor/webassets/test.py +++ b/pelican/plugins/webassets/vendor/webassets/test.py @@ -11,9 +11,9 @@ from os import path import time -from webassets import Environment, Bundle -from webassets.six.moves import map -from webassets.six.moves import zip +from pelican.plugins.webassets.vendor.webassets import Environment, Bundle +from pelican.plugins.webassets.vendor.webassets.six.moves import map +from pelican.plugins.webassets.vendor.webassets.six.moves import zip __all__ = ('TempDirHelper', 'TempEnvironmentHelper',) diff --git a/pelican/plugins/webassets/vendor/webassets/updater.py b/pelican/plugins/webassets/vendor/webassets/updater.py index 0153cc7..8734b6b 100644 --- a/pelican/plugins/webassets/vendor/webassets/updater.py +++ b/pelican/plugins/webassets/vendor/webassets/updater.py @@ -26,11 +26,11 @@ increase as using the hash to reliably determine which bundles to skip. 
""" -from webassets import six -from webassets.six.moves import map -from webassets.six.moves import zip -from webassets.exceptions import BundleError, BuildError -from webassets.utils import RegistryMetaclass, is_url, hash_func +from pelican.plugins.webassets.vendor.webassets import six +from pelican.plugins.webassets.vendor.webassets.six.moves import map +from pelican.plugins.webassets.vendor.webassets.six.moves import zip +from pelican.plugins.webassets.vendor.webassets.exceptions import BundleError, BuildError +from pelican.plugins.webassets.vendor.webassets.utils import RegistryMetaclass, is_url, hash_func __all__ = ('get_updater', 'SKIP_CACHE', @@ -116,7 +116,7 @@ class TimestampUpdater(BundleDefUpdater): def check_timestamps(self, bundle, ctx, o_modified=None): from .bundle import Bundle - from webassets.version import TimestampVersion + from pelican.plugins.webassets.vendor.webassets.version import TimestampVersion if not o_modified: try: @@ -146,7 +146,7 @@ def check_timestamps(self, bundle, ctx, o_modified=None): # Recurse through the bundle hierarchy. Check the timestamp of all # the bundle source files, as well as any additional # dependencies that we are supposed to watch. - from webassets.bundle import wrap + from pelican.plugins.webassets.vendor.webassets.bundle import wrap for iterator, result in ( (lambda e: map(lambda s: s[1], bundle.resolve_contents(e)), True), (bundle.resolve_depends, SKIP_CACHE) diff --git a/pelican/plugins/webassets/vendor/webassets/utils.py b/pelican/plugins/webassets/vendor/webassets/utils.py index 985f5ce..79c5d54 100644 --- a/pelican/plugins/webassets/vendor/webassets/utils.py +++ b/pelican/plugins/webassets/vendor/webassets/utils.py @@ -1,4 +1,4 @@ -from webassets import six +from pelican.plugins.webassets.vendor.webassets import six import contextlib import os import sys @@ -44,7 +44,7 @@ FileNotFoundError = FileNotFoundError -from webassets.six import StringIO +from pelican.plugins.webassets.vendor.webassets.six import StringIO try: diff --git a/pelican/plugins/webassets/vendor/webassets/version.py b/pelican/plugins/webassets/vendor/webassets/version.py index 5bb7160..6c2e897 100644 --- a/pelican/plugins/webassets/vendor/webassets/version.py +++ b/pelican/plugins/webassets/vendor/webassets/version.py @@ -6,10 +6,10 @@ import os import pickle -from webassets import six +from pelican.plugins.webassets.vendor.webassets import six -from webassets.merge import FileHunk -from webassets.utils import md5_constructor, RegistryMetaclass, is_url +from pelican.plugins.webassets.vendor.webassets.merge import FileHunk +from pelican.plugins.webassets.vendor.webassets.utils import md5_constructor, RegistryMetaclass, is_url __all__ = ('get_versioner', 'VersionIndeterminableError', @@ -89,7 +89,7 @@ def determine_version(self, bundle, ctx, hunk=None): # timestamp of the final file is the fastest way to do this. # Note that this works because of our ``save_done`` hook. if not hunk: - from webassets.bundle import has_placeholder + from pelican.plugins.webassets.vendor.webassets.bundle import has_placeholder if not has_placeholder(bundle.output): return self.get_timestamp(bundle.resolve_output(ctx)) @@ -116,7 +116,7 @@ def get_timestamp(cls, filename): @classmethod def find_recent_most_timestamp(cls, bundle, ctx): - from webassets.bundle import get_all_bundle_files + from pelican.plugins.webassets.vendor.webassets.bundle import get_all_bundle_files # Recurse through the bundle hierarchy. 
Check the timestamp of all # the bundle source files, as well as any additional # dependencies that we are supposed to watch. @@ -154,7 +154,7 @@ def __init__(self, length=8, hash=md5_constructor): def determine_version(self, bundle, ctx, hunk=None): if not hunk: - from webassets.bundle import has_placeholder + from pelican.plugins.webassets.vendor.webassets.bundle import has_placeholder if not has_placeholder(bundle.output): hunk = FileHunk(bundle.resolve_output(ctx)) else: From 7c3af5f659732e8b28d9570fd592d30ee75450da Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:11:26 -0700 Subject: [PATCH 03/23] Drop the webassets dependency --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3ec1762..b1d5bc3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,7 +37,6 @@ classifiers = [ python = ">=3.7,<4.0" pelican = ">=4.5" markdown = {version = ">=3.2", optional = true} -webassets = "^2.0" [tool.poetry.group.dev.dependencies] cssmin = "^0.2.0" From 08fa7491d520dc5ec3f21a336eb4f7e8bdfe5719 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:14:29 -0700 Subject: [PATCH 04/23] Switch to importing the vendored webassets --- pelican/plugins/webassets/webassets.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pelican/plugins/webassets/webassets.py b/pelican/plugins/webassets/webassets.py index 9d79cfd..feba2a5 100644 --- a/pelican/plugins/webassets/webassets.py +++ b/pelican/plugins/webassets/webassets.py @@ -14,6 +14,7 @@ .. _webassets: https://webassets.readthedocs.org/ """ + import logging import os @@ -22,9 +23,9 @@ logger = logging.getLogger(__name__) try: - import webassets - from webassets import Environment - from webassets.ext.jinja2 import AssetsExtension + from .vendor import webassets + from .vendor.webassets import Environment + from .vendor.webassets.ext.jinja2 import AssetsExtension except ImportError: webassets = None From 92add6381b2030d64cc0bf0b9b5a78fe8d2b1937 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:22:59 -0700 Subject: [PATCH 05/23] ci: switch to uv for CI as well as for deps --- .github/workflows/main.yml | 58 +++++++++++++------------------------- 1 file changed, 19 insertions(+), 39 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6ea3576..90b35b0 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,7 +6,6 @@ env: PYTEST_ADDOPTS: "--color=yes" jobs: - test: name: Test - ${{ matrix.python-version }} runs-on: ubuntu-latest @@ -17,25 +16,17 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Install Poetry - run: pipx install poetry - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - cache: "poetry" - cache-dependency-path: "pyproject.toml" + - name: Install uv + uses: astral-sh/setup-uv@v3 - name: Install dependencies + env: + UV_PYTHON: "python${{ matrix.python-version }}" run: | - poetry env use "${{ matrix.python-version }}" - poetry run pip install --upgrade libsass - poetry install --no-interaction + uv sync --frozen --no-sources - name: Run tests - run: poetry run invoke tests - + run: uv run invoke tests lint: name: Lint @@ -49,24 +40,17 @@ jobs: with: retry: true - - name: Install Poetry - run: pipx install poetry - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.9" - cache: "poetry" - cache-dependency-path: "pyproject.toml" + - name: 
Install uv + uses: astral-sh/setup-uv@v3 - name: Install dependencies + env: + UV_PYTHON: "python3.9" run: | - poetry env use "3.9" - poetry install --no-interaction + uv sync --frozen --no-sources - name: Run linters - run: poetry run invoke lint --diff - + run: uv run invoke lint --diff deploy: name: Deploy @@ -84,27 +68,23 @@ jobs: with: token: ${{ secrets.GH_TOKEN }} - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.9" + - name: Install uv + uses: astral-sh/setup-uv@v3 - name: Check release id: check_release run: | - python -m pip install --upgrade pip - python -m pip install autopub[github] - autopub check + uvx --with 'autopub[github]' autopub check - name: Publish if: ${{ steps.check_release.outputs.autopub_release=='true' }} env: GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} run: | - autopub prepare - autopub commit - autopub build - autopub githubrelease + uvx --with 'autopub[github]' autopub prepare + uvx --with 'autopub[github]' autopub commit + uvx --with 'autopub[github]' autopub build + uvx --with 'autopub[github]' autopub githubrelease - name: Upload package to PyPI if: ${{ steps.check_release.outputs.autopub_release=='true' }} From 3e1c4b3207805c219f3e0b5cb9603156c4ceca63 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:26:44 -0700 Subject: [PATCH 06/23] ci: set up uv to build The motivation here is that whatever Poetry is doing to resolve dependencies is finding a dead docutils. I gave up on the tool, because I couldn't find a way to get the graph and find the source of the missing version. So, here we go. --- pyproject.toml | 95 ++++--- uv.lock | 733 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 783 insertions(+), 45 deletions(-) create mode 100644 uv.lock diff --git a/pyproject.toml b/pyproject.toml index b1d5bc3..8808d30 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,35 @@ -[tool.poetry] +[tool.autopub] +project-name = "Web Assets" +git-username = "botpub" +git-email = "52496925+botpub@users.noreply.github.com" +append-github-contributor = true + +[tool.isort] +# Maintain compatibility with Black +profile = "black" +multi_line_output = 3 + +# Sort imports within their section independent of the import type +force_sort_within_sections = true + +# Designate "pelican" as separate import section +known_pelican = "pelican" +sections = "FUTURE,STDLIB,THIRDPARTY,PELICAN,FIRSTPARTY,LOCALFOLDER" + +[build-system] +requires = ["hatchling", "hatch-vcs", "hatch-fancy-pypi-readme>=22.8.0"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["pelican/plugins/webassets"] + +[project] name = "pelican-webassets" version = "2.0.0" description = "Pelican plugin to manage web assets such as CSS and JS files" -authors = ["Pelican Dev Team "] +authors = [ + { name ="Pelican Dev Team " }, +] license = "AGPL-3.0" readme = "README.md" keywords = ["pelican", "plugin", "webassets", "css", "js", "minimization", "compilation"] @@ -11,6 +38,7 @@ documentation = "https://docs.getpelican.com" packages = [ { include = "pelican" }, ] +requires-python = "~= 3.7" classifiers = [ "Development Status :: 5 - Production/Stable", @@ -28,50 +56,27 @@ classifiers = [ "Topic :: Software Development :: Pre-processors", "Topic :: Software Development :: User Interfaces", ] +dependencies = [ + "pelican >=4.5", +] -[tool.poetry.urls] +[project.urls] "Funding" = "https://donate.getpelican.com/" "Issue Tracker" = "https://github.com/pelican-plugins/webassets/issues" -[tool.poetry.dependencies] -python = 
">=3.7,<4.0" -pelican = ">=4.5" -markdown = {version = ">=3.2", optional = true} - -[tool.poetry.group.dev.dependencies] -cssmin = "^0.2.0" -libsass = {version = ">=0.22.0", optional = true} -black = "^23" -flake8 = "^3.9" -flake8-black = "^0.3" -invoke = "^2.1" -isort = "^5.11.5" -markdown = "^3.4" -pytest = "^6.0" -pytest-cov = "^3.0" -pytest-sugar = "^0.9.7" - -[tool.poetry.extras] -markdown = ["markdown"] - -[tool.autopub] -project-name = "Web Assets" -git-username = "botpub" -git-email = "52496925+botpub@users.noreply.github.com" -append-github-contributor = true - -[tool.isort] -# Maintain compatibility with Black -profile = "black" -multi_line_output = 3 - -# Sort imports within their section independent of the import type -force_sort_within_sections = true - -# Designate "pelican" as separate import section -known_pelican = "pelican" -sections = "FUTURE,STDLIB,THIRDPARTY,PELICAN,FIRSTPARTY,LOCALFOLDER" - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" +[project.optional-dependencies] +dev = [ + "cssmin == 0.2.0", + "libsass", + "black == 23.3.0", + "flake8 == 3.9.2", + "flake8-black == 0.3.6", + "invoke == 2.2.0", + "isort == 5.11.5", + "markdown == 3.4.4", + "pytest == 6.2.5", + "pytest-cov == 3.0.0", + "pytest-sugar == 0.9.7",] +markdown = [ + "markdown == 3.4.4", +] diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..017ca6d --- /dev/null +++ b/uv.lock @@ -0,0 +1,733 @@ +version = 1 +requires-python = ">=3.7, <4" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/c6/53da25344e3e3a9c01095a89f16dbcda021c609ddb42dd6d7c0528236fb2/atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11", size = 14227 } + +[[package]] +name = "attrs" +version = "24.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 }, +] + +[[package]] +name = "black" +version = "23.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typed-ast", marker = "python_full_version < '3.8' and implementation_name == 'cpython'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/36/66370f5017b100225ec4950a60caeef60201a10080da57ddb24124453fba/black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940", size = 582156 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/f4/7908f71cc71da08df1317a3619f002cbf91927fb5d3ffc7723905a2113f7/black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = 
"sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915", size = 1342273 }, + { url = "https://files.pythonhosted.org/packages/27/70/07aab2623cfd3789786f17e051487a41d5657258c7b1ef8f780512ffea9c/black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9", size = 2676721 }, + { url = "https://files.pythonhosted.org/packages/29/b1/b584fc863c155653963039664a592b3327b002405043b7e761b9b0212337/black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2", size = 1520336 }, + { url = "https://files.pythonhosted.org/packages/6d/b4/0f13ab7f5e364795ff82b76b0f9a4c9c50afda6f1e2feeb8b03fdd7ec57d/black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c", size = 1654611 }, + { url = "https://files.pythonhosted.org/packages/de/b4/76f152c5eb0be5471c22cd18380d31d188930377a1a57969073b89d6615d/black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c", size = 1286657 }, + { url = "https://files.pythonhosted.org/packages/d7/6f/d3832960a3b646b333b7f0d80d336a3c123012e9d9d5dba4a622b2b6181d/black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6", size = 1326112 }, + { url = "https://files.pythonhosted.org/packages/eb/a5/17b40bfd9b607b69fa726b0b3a473d14b093dcd5191ea1a1dd664eccfee3/black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b", size = 2643808 }, + { url = "https://files.pythonhosted.org/packages/69/49/7e1f0cf585b0d607aad3f971f95982cc4208fc77f92363d632d23021ee57/black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d", size = 1503287 }, + { url = "https://files.pythonhosted.org/packages/c0/53/42e312c17cfda5c8fc4b6b396a508218807a3fcbb963b318e49d3ddd11d5/black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70", size = 1638625 }, + { url = "https://files.pythonhosted.org/packages/3f/0d/81dd4194ce7057c199d4f28e4c2a885082d9d929e7a55c514b23784f7787/black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326", size = 1293585 }, + { url = "https://files.pythonhosted.org/packages/24/eb/2d2d2c27cb64cfd073896f62a952a802cd83cf943a692a2f278525b57ca9/black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b", size = 1447428 }, + { url = "https://files.pythonhosted.org/packages/49/36/15d2122f90ff1cd70f06892ebda777b650218cf84b56b5916a993dc1359a/black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2", size = 1576467 }, + { url = "https://files.pythonhosted.org/packages/ca/44/eb41edd3f558a6139f09eee052dead4a7a464e563b822ddf236f5a8ee286/black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925", size = 1226437 }, + { url = "https://files.pythonhosted.org/packages/ce/f4/2b0c6ac9e1f8584296747f66dd511898b4ebd51d6510dba118279bff53b6/black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = 
"sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27", size = 1331955 }, + { url = "https://files.pythonhosted.org/packages/21/14/d5a2bec5fb15f9118baab7123d344646fac0b1c6939d51c2b05259cd2d9c/black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331", size = 2658520 }, + { url = "https://files.pythonhosted.org/packages/13/0a/ed8b66c299e896780e4528eed4018f5b084da3b9ba4ee48328550567d866/black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5", size = 1509852 }, + { url = "https://files.pythonhosted.org/packages/12/4b/99c71d1cf1353edd5aff2700b8960f92e9b805c9dab72639b67dbb449d3a/black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961", size = 1641852 }, + { url = "https://files.pythonhosted.org/packages/d1/6e/5810b6992ed70403124c67e8b3f62858a32b35405177553f1a78ed6b6e31/black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8", size = 1297694 }, + { url = "https://files.pythonhosted.org/packages/13/25/cfa06788d0a936f2445af88f13604b5bcd5c9d050db618c718e6ebe66f74/black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30", size = 1341089 }, + { url = "https://files.pythonhosted.org/packages/fd/5b/fc2d7922c1a6bb49458d424b5be71d251f2d0dc97be9534e35d171bdc653/black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3", size = 2674699 }, + { url = "https://files.pythonhosted.org/packages/49/d7/f3b7da6c772800f5375aeb050a3dcf682f0bbeb41d313c9c2820d0156e4e/black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266", size = 1519946 }, + { url = "https://files.pythonhosted.org/packages/3c/d7/85f3d79f9e543402de2244c4d117793f262149e404ea0168841613c33e07/black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab", size = 1654176 }, + { url = "https://files.pythonhosted.org/packages/06/1e/273d610249f0335afb1ddb03664a03223f4826e3d1a95170a0142cb19fb4/black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb", size = 1286299 }, + { url = "https://files.pythonhosted.org/packages/ad/e7/4642b7f462381799393fbad894ba4b32db00870a797f0616c197b07129a9/black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4", size = 180965 }, +] + +[[package]] +name = "blinker" +version = "1.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/96/ed1420a974540da7419094f2553bc198c454cee5f72576e7c7629dd12d6e/blinker-1.6.3.tar.gz", hash = "sha256:152090d27c1c5c722ee7e48504b02d76502811ce02e1523553b4cf8c8b3d3a8d", size = 28092 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/2b/11bcedb7dee4923253a4a21bae3be854bcc4f06295bd827756352016d97c/blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa", size = 13398 }, +] + +[[package]] +name = "click" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = 
"platform_system == 'Windows'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "coverage" +version = "7.2.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/45/8b/421f30467e69ac0e414214856798d4bc32da1336df745e49e49ae5c1e2a8/coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", size = 762575 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/24/be01e62a7bce89bcffe04729c540382caa5a06bee45ae42136c93e2499f5/coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8", size = 200724 }, + { url = "https://files.pythonhosted.org/packages/3d/80/7060a445e1d2c9744b683dc935248613355657809d6c6b2716cdf4ca4766/coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb", size = 201024 }, + { url = "https://files.pythonhosted.org/packages/b8/9d/926fce7e03dbfc653104c2d981c0fa71f0572a9ebd344d24c573bd6f7c4f/coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6", size = 229528 }, + { url = "https://files.pythonhosted.org/packages/d1/3a/67f5d18f911abf96857f6f7e4df37ca840e38179e2cc9ab6c0b9c3380f19/coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2", size = 227842 }, + { url = "https://files.pythonhosted.org/packages/b4/bd/1b2331e3a04f4cc9b7b332b1dd0f3a1261dfc4114f8479bebfcc2afee9e8/coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063", size = 228717 }, + { url = "https://files.pythonhosted.org/packages/2b/86/3dbf9be43f8bf6a5ca28790a713e18902b2d884bc5fa9512823a81dff601/coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1", size = 234632 }, + { url = "https://files.pythonhosted.org/packages/91/e8/469ed808a782b9e8305a08bad8c6fa5f8e73e093bda6546c5aec68275bff/coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353", size = 232875 }, + { url = "https://files.pythonhosted.org/packages/29/8f/4fad1c2ba98104425009efd7eaa19af9a7c797e92d40cd2ec026fa1f58cb/coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495", size = 234094 }, + { url = "https://files.pythonhosted.org/packages/94/4e/d4e46a214ae857be3d7dc5de248ba43765f60daeb1ab077cb6c1536c7fba/coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818", size = 203184 }, + { url = "https://files.pythonhosted.org/packages/1f/e9/d6730247d8dec2a3dddc520ebe11e2e860f0f98cee3639e23de6cf920255/coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850", size = 204096 }, + { url = "https://files.pythonhosted.org/packages/c6/fa/529f55c9a1029c840bcc9109d5a15ff00478b7ff550a1ae361f8745f8ad5/coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", size = 200895 }, + { url = "https://files.pythonhosted.org/packages/67/d7/cd8fe689b5743fffac516597a1222834c42b80686b99f5b44ef43ccc2a43/coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", size = 201120 }, + { url = "https://files.pythonhosted.org/packages/8c/95/16eed713202406ca0a37f8ac259bbf144c9d24f9b8097a8e6ead61da2dbb/coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3", size = 233178 }, + { url = "https://files.pythonhosted.org/packages/c1/49/4d487e2ad5d54ed82ac1101e467e8994c09d6123c91b2a962145f3d262c2/coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", size = 230754 }, + { url = "https://files.pythonhosted.org/packages/a7/cd/3ce94ad9d407a052dc2a74fbeb1c7947f442155b28264eb467ee78dea812/coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", size = 232558 }, + { url = "https://files.pythonhosted.org/packages/8f/a8/12cc7b261f3082cc299ab61f677f7e48d93e35ca5c3c2f7241ed5525ccea/coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", size = 241509 }, + { url = "https://files.pythonhosted.org/packages/04/fa/43b55101f75a5e9115259e8be70ff9279921cb6b17f04c34a5702ff9b1f7/coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", size = 239924 }, + { url = "https://files.pythonhosted.org/packages/68/5f/d2bd0f02aa3c3e0311986e625ccf97fdc511b52f4f1a063e4f37b624772f/coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", size = 240977 }, + { url = "https://files.pythonhosted.org/packages/ba/92/69c0722882643df4257ecc5437b83f4c17ba9e67f15dc6b77bad89b6982e/coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", size = 203168 }, + { url = 
"https://files.pythonhosted.org/packages/b1/96/c12ed0dfd4ec587f3739f53eb677b9007853fd486ccb0e7d5512a27bab2e/coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", size = 204185 }, + { url = "https://files.pythonhosted.org/packages/ff/d5/52fa1891d1802ab2e1b346d37d349cb41cdd4fd03f724ebbf94e80577687/coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", size = 201020 }, + { url = "https://files.pythonhosted.org/packages/24/df/6765898d54ea20e3197a26d26bb65b084deefadd77ce7de946b9c96dfdc5/coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", size = 233994 }, + { url = "https://files.pythonhosted.org/packages/15/81/b108a60bc758b448c151e5abceed027ed77a9523ecbc6b8a390938301841/coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", size = 231358 }, + { url = "https://files.pythonhosted.org/packages/61/90/c76b9462f39897ebd8714faf21bc985b65c4e1ea6dff428ea9dc711ed0dd/coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", size = 233316 }, + { url = "https://files.pythonhosted.org/packages/04/d6/8cba3bf346e8b1a4fb3f084df7d8cea25a6b6c56aaca1f2e53829be17e9e/coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", size = 240159 }, + { url = "https://files.pythonhosted.org/packages/6e/ea/4a252dc77ca0605b23d477729d139915e753ee89e4c9507630e12ad64a80/coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", size = 238127 }, + { url = "https://files.pythonhosted.org/packages/9f/5c/d9760ac497c41f9c4841f5972d0edf05d50cad7814e86ee7d133ec4a0ac8/coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", size = 239833 }, + { url = "https://files.pythonhosted.org/packages/69/8c/26a95b08059db1cbb01e4b0e6d40f2e9debb628c6ca86b78f625ceaf9bab/coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", size = 203463 }, + { url = "https://files.pythonhosted.org/packages/b7/00/14b00a0748e9eda26e97be07a63cc911108844004687321ddcc213be956c/coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", size = 204347 }, + { url = "https://files.pythonhosted.org/packages/80/d7/67937c80b8fd4c909fdac29292bc8b35d9505312cff6bcab41c53c5b1df6/coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f", size = 200580 }, + { url = "https://files.pythonhosted.org/packages/7a/05/084864fa4bbf8106f44fb72a56e67e0cd372d3bf9d893be818338c81af5d/coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb", size = 226237 }, + { url = 
"https://files.pythonhosted.org/packages/67/a2/6fa66a50e6e894286d79a3564f42bd54a9bd27049dc0a63b26d9924f0aa3/coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9", size = 224256 }, + { url = "https://files.pythonhosted.org/packages/e2/c0/73f139794c742840b9ab88e2e17fe14a3d4668a166ff95d812ac66c0829d/coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd", size = 225550 }, + { url = "https://files.pythonhosted.org/packages/03/ec/6f30b4e0c96ce03b0e64aec46b4af2a8c49b70d1b5d0d69577add757b946/coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a", size = 232440 }, + { url = "https://files.pythonhosted.org/packages/22/c1/2f6c1b6f01a0996c9e067a9c780e1824351dbe17faae54388a4477e6d86f/coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959", size = 230897 }, + { url = "https://files.pythonhosted.org/packages/8d/d6/53e999ec1bf7498ca4bc5f3b8227eb61db39068d2de5dcc359dec5601b5a/coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02", size = 232024 }, + { url = "https://files.pythonhosted.org/packages/e9/40/383305500d24122dbed73e505a4d6828f8f3356d1f68ab6d32c781754b81/coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f", size = 203293 }, + { url = "https://files.pythonhosted.org/packages/0e/bc/7e3a31534fabb043269f14fb64e2bb2733f85d4cf39e5bbc71357c57553a/coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0", size = 204040 }, + { url = "https://files.pythonhosted.org/packages/c6/fc/be19131010930a6cf271da48202c8cc1d3f971f68c02fb2d3a78247f43dc/coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5", size = 200689 }, + { url = "https://files.pythonhosted.org/packages/28/d7/9a8de57d87f4bbc6f9a6a5ded1eaac88a89bf71369bb935dac3c0cf2893e/coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5", size = 200986 }, + { url = "https://files.pythonhosted.org/packages/c8/e4/e6182e4697665fb594a7f4e4f27cb3a4dd00c2e3d35c5c706765de8c7866/coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9", size = 230648 }, + { url = "https://files.pythonhosted.org/packages/7b/e3/f552d5871943f747165b92a924055c5d6daa164ae659a13f9018e22f3990/coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6", size = 228511 }, + { url = "https://files.pythonhosted.org/packages/44/55/49f65ccdd4dfd6d5528e966b28c37caec64170c725af32ab312889d2f857/coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e", size = 229852 }, + { url = 
"https://files.pythonhosted.org/packages/0d/31/340428c238eb506feb96d4fb5c9ea614db1149517f22cc7ab8c6035ef6d9/coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050", size = 235578 }, + { url = "https://files.pythonhosted.org/packages/dd/ce/97c1dd6592c908425622fe7f31c017d11cf0421729b09101d4de75bcadc8/coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5", size = 234079 }, + { url = "https://files.pythonhosted.org/packages/de/a3/5a98dc9e239d0dc5f243ef5053d5b1bdcaa1dee27a691dfc12befeccf878/coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f", size = 234991 }, + { url = "https://files.pythonhosted.org/packages/4a/fb/78986d3022e5ccf2d4370bc43a5fef8374f092b3c21d32499dee8e30b7b6/coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e", size = 203160 }, + { url = "https://files.pythonhosted.org/packages/c3/1c/6b3c9c363fb1433c79128e0d692863deb761b1b78162494abb9e5c328bc0/coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c", size = 204085 }, + { url = "https://files.pythonhosted.org/packages/88/da/495944ebf0ad246235a6bd523810d9f81981f9b81c6059ba1f56e943abe0/coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9", size = 200725 }, + { url = "https://files.pythonhosted.org/packages/ca/0c/3dfeeb1006c44b911ee0ed915350db30325d01808525ae7cc8d57643a2ce/coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2", size = 201022 }, + { url = "https://files.pythonhosted.org/packages/61/af/5964b8d7d9a5c767785644d9a5a63cacba9a9c45cc42ba06d25895ec87be/coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7", size = 229102 }, + { url = "https://files.pythonhosted.org/packages/d9/1d/cd467fceb62c371f9adb1d739c92a05d4e550246daa90412e711226bd320/coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e", size = 227441 }, + { url = "https://files.pythonhosted.org/packages/fe/57/e4f8ad64d84ca9e759d783a052795f62a9f9111585e46068845b1cb52c2b/coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1", size = 228265 }, + { url = "https://files.pythonhosted.org/packages/88/8b/b0d9fe727acae907fa7f1c8194ccb6fe9d02e1c3e9001ecf74c741f86110/coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9", size = 234217 }, + { url = "https://files.pythonhosted.org/packages/66/2e/c99fe1f6396d93551aa352c75410686e726cd4ea104479b9af1af22367ce/coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250", size = 232466 }, + { url = "https://files.pythonhosted.org/packages/bb/e9/88747b40c8fb4a783b40222510ce6d66170217eb05d7f46462c36b4fa8cc/coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2", size = 233669 }, + { url = "https://files.pythonhosted.org/packages/b1/d5/a8e276bc005e42114468d4fe03e0a9555786bc51cbfe0d20827a46c1565a/coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb", size = 203199 }, + { url = "https://files.pythonhosted.org/packages/a9/0c/4a848ae663b47f1195abcb09a951751dd61f80b503303b9b9d768e0fd321/coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27", size = 204109 }, + { url = "https://files.pythonhosted.org/packages/67/fb/b3b1d7887e1ea25a9608b0776e480e4bbc303ca95a31fd585555ec4fff5a/coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d", size = 193207 }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "cssmin" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/d8/dc9da69bb186303f7ab41adef0a5b6d34da2fdba006827620877760241c3/cssmin-0.2.0.tar.gz", hash = "sha256:e012f0cc8401efcf2620332339011564738ae32be8c84b2e43ce8beaec1067b6", size = 3228 } + +[[package]] +name = "docutils" +version = "0.20.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/53/a5da4f2c5739cf66290fac1431ee52aff6851c7c8ffd8264f13affd7bcdd/docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b", size = 2058365 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/87/f238c0670b94533ac0353a4e2a1a771a0cc73277b88bff23d3ae35a256c1/docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6", size = 572666 }, +] + +[[package]] +name = "feedgenerator" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/4e/0efde53652edbae3f86c0ec67260bb53287edc67033ac8d00fe08cd02557/feedgenerator-2.1.0.tar.gz", hash = "sha256:f075f23f28fd227f097c36b212161c6cf012e1c6caaf7ff53d5d6bb02cd42b9d", size = 20682 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/a1/b7b1711d9bf43c3795366431633ab6ba6942744243aad809272ebfa59b39/feedgenerator-2.1.0-py3-none-any.whl", hash = "sha256:93b7ce1c5a86195cafd6a8e9baf6a2a863ebd6d9905e840ce5778f73efd9a8d5", size = 21796 }, +] + +[[package]] +name = "flake8" +version = "3.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, + { name = "mccabe" }, + { name = "pycodestyle" }, + { name = "pyflakes" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/47/15b267dfe7e03dca4c4c06e7eadbd55ef4dfd368b13a0bab36d708b14366/flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b", size = 164777 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/80/35a0716e5d5101e643404dabd20f07f5528a21f3ef4032d31a49c913237b/flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907", size = 73147 }, +] + +[[package]] +name = "flake8-black" +version = "0.3.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "black" }, + 
{ name = "flake8" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/52/dfb29b60cf14ae2d5b6119733b60fb32dc1ce0b35746f53b8dcc92d21f5c/flake8-black-0.3.6.tar.gz", hash = "sha256:0dfbca3274777792a5bcb2af887a4cad72c72d0e86c94e08e3a3de151bb41c34", size = 14565 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/89/0b4551179632da06da3770047fdfd927abe9e3c9f45182d216d5d177cfb3/flake8_black-0.3.6-py3-none-any.whl", hash = "sha256:fe8ea2eca98d8a504f22040d9117347f6b367458366952862ac3586e7d4eeaca", size = 9898 }, +] + +[[package]] +name = "importlib-metadata" +version = "6.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.8'" }, + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/82/f6e29c8d5c098b6be61460371c2c5591f4a335923639edec43b3830650a4/importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4", size = 53569 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/94/64287b38c7de4c90683630338cf28f129decbba0a44f0c6db35a873c73c4/importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5", size = 22934 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "invoke" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/42/127e6d792884ab860defc3f4d80a8f9812e48ace584ffc5a346de58cdc6c/invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5", size = 299835 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820", size = 160274 }, +] + +[[package]] +name = "isort" +version = "5.11.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/63/18cc5c2f9084d3f91ce704f2b5c8e17bedd777244e7732c21a31992b0a78/isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db", size = 187953 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/f6/c55db45970fbd14de6ab72082f1b8a143c3a69aa031c1e0dd4b9ecc8d496/isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746", size = 104094 }, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = 
"sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, +] + +[[package]] +name = "libsass" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/55/14/f1d9578dce39f890ae3c0f93db8a23e89d2a1403da81d307ffb429df7c3b/libsass-0.22.0.tar.gz", hash = "sha256:3ab5ad18e47db560f4f0c09e3d28cf3bb1a44711257488ac2adad69f4f7f8425", size = 316258 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/be/178b03e598e86bf1faa70f9ebe4cfad236dcf159af11a39f30ac8d7ce693/libsass-0.22.0-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f1efc1b612299c88aec9e39d6ca0c266d360daa5b19d9430bdeaffffa86993f9", size = 9443966 }, + { url = "https://files.pythonhosted.org/packages/92/fd/73b8081c5bc2b11b61596f74b54d45226633313c2a4de53205da948fc01c/libsass-0.22.0-cp37-abi3-macosx_10_15_x86_64.whl", hash = "sha256:081e256ab3c5f3f09c7b8dea3bf3bf5e64a97c6995fd9eea880639b3f93a9f9a", size = 1064075 }, + { url = "https://files.pythonhosted.org/packages/f5/ee/844666d66a4dbb7b81e3c0257253963091f2abec8454ce7abf6f89b409f9/libsass-0.22.0-cp37-abi3-win32.whl", hash = "sha256:89c5ce497fcf3aba1dd1b19aae93b99f68257e5f2026b731b00a872f13324c7f", size = 775665 }, + { url = "https://files.pythonhosted.org/packages/43/33/ccd65ef94bf37ad01ad4f82c3426d9274eee7d448ca9c23e516d03322520/libsass-0.22.0-cp37-abi3-win_amd64.whl", hash = "sha256:65455a2728b696b62100eb5932604aa13a29f4ac9a305d95773c14aaa7200aaf", size = 880885 }, + { url = "https://files.pythonhosted.org/packages/15/99/f0bbc4ccd254ad7b8e76fe878f11eb98a1931fa9bc46d599a52443641bff/libsass-0.22.0-cp38-abi3-macosx_14_0_arm64.whl", hash = "sha256:5fb2297a4754a6c8e25cfe5c015a3b51a2b6b9021b333f989bb8ce9d60eb5828", size = 982832 }, +] + +[[package]] +name = "markdown" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/87/2a/62841f4fb1fef5fa015ded48d02401cd95643ca03b6760b29437b62a04a4/Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6", size = 324459 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/b5/228c1cdcfe138f1a8e01ab1b54284c8b83735476cb22b6ba251656ed13ad/Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941", size = 94174 }, +] + +[[package]] +name = "markdown-it-py" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, + { name = "typing-extensions", marker = "python_full_version < '3.8'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/c0/59bd6d0571986f72899288a95d9d6178d0eebd70b6650f1bb3f0da90f8f7/markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1", size = 67120 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/25/2d88e8feee8e055d015343f9b86e370a1ccbec546f2865c98397aaef24af/markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30", size = 84466 }, +] + +[[package]] +name = "markupsafe" 
+version = "2.1.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/5b/aae44c6655f3801e81aa3eef09dbbf012431987ba564d7231722f68df02d/MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", size = 19384 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/54/ad5eb37bf9d51800010a74e4665425831a9db4e7c4e0fde4352e391e808e/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", size = 18206 }, + { url = "https://files.pythonhosted.org/packages/6a/4a/a4d49415e600bacae038c67f9fecc1d5433b9d3c71a4de6f33537b89654c/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", size = 14079 }, + { url = "https://files.pythonhosted.org/packages/0a/7b/85681ae3c33c385b10ac0f8dd025c30af83c78cec1c37a6aa3b55e67f5ec/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", size = 26620 }, + { url = "https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", size = 25818 }, + { url = "https://files.pythonhosted.org/packages/29/fe/a36ba8c7ca55621620b2d7c585313efd10729e63ef81e4e61f52330da781/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", size = 25493 }, + { url = "https://files.pythonhosted.org/packages/60/ae/9c60231cdfda003434e8bd27282b1f4e197ad5a710c14bee8bea8a9ca4f0/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", size = 30630 }, + { url = "https://files.pythonhosted.org/packages/65/dc/1510be4d179869f5dafe071aecb3f1f41b45d37c02329dfba01ff59e5ac5/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", size = 29745 }, + { url = "https://files.pythonhosted.org/packages/30/39/8d845dd7d0b0613d86e0ef89549bfb5f61ed781f59af45fc96496e897f3a/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", size = 30021 }, + { url = "https://files.pythonhosted.org/packages/c7/5c/356a6f62e4f3c5fbf2602b4771376af22a3b16efa74eb8716fb4e328e01e/MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", size = 16659 }, + { url = "https://files.pythonhosted.org/packages/69/48/acbf292615c65f0604a0c6fc402ce6d8c991276e16c80c46a8f758fbd30c/MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", size = 17213 }, + { url = "https://files.pythonhosted.org/packages/11/e7/291e55127bb2ae67c64d66cef01432b5933859dfb7d6949daa721b89d0b3/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", size = 18219 }, + { url = 
"https://files.pythonhosted.org/packages/6b/cb/aed7a284c00dfa7c0682d14df85ad4955a350a21d2e3b06d8240497359bf/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", size = 14098 }, + { url = "https://files.pythonhosted.org/packages/1c/cf/35fe557e53709e93feb65575c93927942087e9b97213eabc3fe9d5b25a55/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", size = 29014 }, + { url = "https://files.pythonhosted.org/packages/97/18/c30da5e7a0e7f4603abfc6780574131221d9148f323752c2755d48abad30/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", size = 28220 }, + { url = "https://files.pythonhosted.org/packages/0c/40/2e73e7d532d030b1e41180807a80d564eda53babaf04d65e15c1cf897e40/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", size = 27756 }, + { url = "https://files.pythonhosted.org/packages/18/46/5dca760547e8c59c5311b332f70605d24c99d1303dd9a6e1fc3ed0d73561/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", size = 33988 }, + { url = "https://files.pythonhosted.org/packages/6d/c5/27febe918ac36397919cd4a67d5579cbbfa8da027fa1238af6285bb368ea/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", size = 32718 }, + { url = "https://files.pythonhosted.org/packages/f8/81/56e567126a2c2bc2684d6391332e357589a96a76cb9f8e5052d85cb0ead8/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", size = 33317 }, + { url = "https://files.pythonhosted.org/packages/00/0b/23f4b2470accb53285c613a3ab9ec19dc944eaf53592cb6d9e2af8aa24cc/MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", size = 16670 }, + { url = "https://files.pythonhosted.org/packages/b7/a2/c78a06a9ec6d04b3445a949615c4c7ed86a0b2eb68e44e7541b9d57067cc/MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", size = 17224 }, + { url = "https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", size = 18215 }, + { url = "https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", size = 14069 }, + { url = "https://files.pythonhosted.org/packages/51/b5/5d8ec796e2a08fc814a2c7d2584b55f889a55cf17dd1a90f2beb70744e5c/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", size = 29452 }, + { url = "https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", size = 28462 }, + { url = "https://files.pythonhosted.org/packages/2d/75/fd6cb2e68780f72d47e6671840ca517bda5ef663d30ada7616b0462ad1e3/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", size = 27869 }, + { url = "https://files.pythonhosted.org/packages/b0/81/147c477391c2750e8fc7705829f7351cf1cd3be64406edcf900dc633feb2/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", size = 33906 }, + { url = "https://files.pythonhosted.org/packages/8b/ff/9a52b71839d7a256b563e85d11050e307121000dcebc97df120176b3ad93/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", size = 32296 }, + { url = "https://files.pythonhosted.org/packages/88/07/2dc76aa51b481eb96a4c3198894f38b480490e834479611a4053fbf08623/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", size = 33038 }, + { url = "https://files.pythonhosted.org/packages/96/0c/620c1fb3661858c0e37eb3cbffd8c6f732a67cd97296f725789679801b31/MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", size = 16572 }, + { url = "https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", size = 17127 }, + { url = "https://files.pythonhosted.org/packages/a7/88/a940e11827ea1c136a34eca862486178294ae841164475b9ab216b80eb8e/MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f", size = 13982 }, + { url = "https://files.pythonhosted.org/packages/cb/06/0d28bd178db529c5ac762a625c335a9168a7a23f280b4db9c95e97046145/MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf", size = 26335 }, + { url = "https://files.pythonhosted.org/packages/4a/1d/c4f5016f87ced614eacc7d5fb85b25bcc0ff53e8f058d069fc8cbfdc3c7a/MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a", size = 25557 }, + { url = "https://files.pythonhosted.org/packages/b3/fb/c18b8c9fbe69e347fdbf782c6478f1bc77f19a830588daa224236678339b/MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52", size = 25245 }, + { url = "https://files.pythonhosted.org/packages/2f/69/30d29adcf9d1d931c75001dd85001adad7374381c9c2086154d9f6445be6/MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9", size = 31013 }, + { url = "https://files.pythonhosted.org/packages/3a/03/63498d05bd54278b6ca340099e5b52ffb9cdf2ee4f2d9b98246337e21689/MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df", size = 30178 }, + { url = 
"https://files.pythonhosted.org/packages/68/79/11b4fe15124692f8673b603433e47abca199a08ecd2a4851bfbdc97dc62d/MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50", size = 30429 }, + { url = "https://files.pythonhosted.org/packages/ed/88/408bdbf292eb86f03201c17489acafae8358ba4e120d92358308c15cea7c/MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371", size = 16633 }, + { url = "https://files.pythonhosted.org/packages/6c/4c/3577a52eea1880538c435176bc85e5b3379b7ab442327ccd82118550758f/MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2", size = 17215 }, + { url = "https://files.pythonhosted.org/packages/f8/ff/2c942a82c35a49df5de3a630ce0a8456ac2969691b230e530ac12314364c/MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a", size = 18192 }, + { url = "https://files.pythonhosted.org/packages/4f/14/6f294b9c4f969d0c801a4615e221c1e084722ea6114ab2114189c5b8cbe0/MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46", size = 14072 }, + { url = "https://files.pythonhosted.org/packages/81/d4/fd74714ed30a1dedd0b82427c02fa4deec64f173831ec716da11c51a50aa/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532", size = 26928 }, + { url = "https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab", size = 26106 }, + { url = "https://files.pythonhosted.org/packages/4c/6f/f2b0f675635b05f6afd5ea03c094557bdb8622fa8e673387444fe8d8e787/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68", size = 25781 }, + { url = "https://files.pythonhosted.org/packages/51/e0/393467cf899b34a9d3678e78961c2c8cdf49fb902a959ba54ece01273fb1/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0", size = 30518 }, + { url = "https://files.pythonhosted.org/packages/f6/02/5437e2ad33047290dafced9df741d9efc3e716b75583bbd73a9984f1b6f7/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4", size = 29669 }, + { url = "https://files.pythonhosted.org/packages/0e/7d/968284145ffd9d726183ed6237c77938c021abacde4e073020f920e060b2/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3", size = 29933 }, + { url = "https://files.pythonhosted.org/packages/bf/f3/ecb00fc8ab02b7beae8699f34db9357ae49d9f21d4d3de6f305f34fa949e/MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff", size = 16656 }, + { url = "https://files.pythonhosted.org/packages/92/21/357205f03514a49b293e214ac39de01fadd0970a6e05e4bf1ddd0ffd0881/MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029", size = 17206 }, + { 
url = "https://files.pythonhosted.org/packages/0f/31/780bb297db036ba7b7bbede5e1d7f1e14d704ad4beb3ce53fb495d22bc62/MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf", size = 18193 }, + { url = "https://files.pythonhosted.org/packages/6c/77/d77701bbef72892affe060cdacb7a2ed7fd68dae3b477a8642f15ad3b132/MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2", size = 14073 }, + { url = "https://files.pythonhosted.org/packages/d9/a7/1e558b4f78454c8a3a0199292d96159eb4d091f983bc35ef258314fe7269/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8", size = 26486 }, + { url = "https://files.pythonhosted.org/packages/5f/5a/360da85076688755ea0cceb92472923086993e86b5613bbae9fbc14136b0/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3", size = 25685 }, + { url = "https://files.pythonhosted.org/packages/6a/18/ae5a258e3401f9b8312f92b028c54d7026a97ec3ab20bfaddbdfa7d8cce8/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465", size = 25338 }, + { url = "https://files.pythonhosted.org/packages/0b/cc/48206bd61c5b9d0129f4d75243b156929b04c94c09041321456fd06a876d/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e", size = 30439 }, + { url = "https://files.pythonhosted.org/packages/d1/06/a41c112ab9ffdeeb5f77bc3e331fdadf97fa65e52e44ba31880f4e7f983c/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea", size = 29531 }, + { url = "https://files.pythonhosted.org/packages/02/8c/ab9a463301a50dab04d5472e998acbd4080597abc048166ded5c7aa768c8/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6", size = 29823 }, + { url = "https://files.pythonhosted.org/packages/bc/29/9bc18da763496b055d8e98ce476c8e718dcfd78157e17f555ce6dd7d0895/MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf", size = 16658 }, + { url = "https://files.pythonhosted.org/packages/f6/f8/4da07de16f10551ca1f640c92b5f316f9394088b183c6a57183df6de5ae4/MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5", size = 17211 }, +] + +[[package]] +name = "mccabe" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/18/fa675aa501e11d6d6ca0ae73a101b2f3571a565e0f7d38e062eec18a91ee/mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f", size = 8612 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/89/479dc97e18549e21354893e4ee4ef36db1d237534982482c3681ee6e7b57/mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", size = 8556 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "packaging" +version = "24.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/b5/b43a27ac7472e1818c4bafd44430e69605baefe1f34440593e0332ec8b4d/packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9", size = 147882 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", size = 53488 }, +] + +[[package]] +name = "pathspec" +version = "0.11.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a0/2a/bd167cdf116d4f3539caaa4c332752aac0b3a0cc0174cdb302ee68933e81/pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3", size = 47032 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/2a/9b1be29146139ef459188f5e420a66e835dda921208db600b7037093891f/pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20", size = 29603 }, +] + +[[package]] +name = "pelican" +version = "4.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "blinker" }, + { name = "docutils" }, + { name = "feedgenerator" }, + { name = "jinja2" }, + { name = "pygments" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "rich" }, + { name = "unidecode" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/37837339c0cec3b4490ab3edd98b8510af8cb46469a75055d4458a3be22c/pelican-4.8.0.tar.gz", hash = "sha256:6445c00cff2142a30592a2de046e5647b84a36c5a0cfafc0eba75abbabb2b4b1", size = 1137954 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/51/b9a57e22a033a9cef7f6aae8b90adb2957d45c7fa1121d9962b42320c048/pelican-4.8.0-py3-none-any.whl", hash = "sha256:c80a81930f57f9b1a11c9ab5894ce1465dcda2028c9e4e3993cf9cbf2061a57d", size = 1396992 }, +] + +[[package]] +name = "pelican-webassets" +version = "2.0.0" +source = { editable = "." 
} +dependencies = [ + { name = "pelican" }, +] + +[package.optional-dependencies] +dev = [ + { name = "black" }, + { name = "cssmin" }, + { name = "flake8" }, + { name = "flake8-black" }, + { name = "invoke" }, + { name = "isort" }, + { name = "libsass" }, + { name = "markdown" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "pytest-sugar" }, +] +markdown = [ + { name = "markdown" }, +] + +[package.metadata] +requires-dist = [ + { name = "black", marker = "extra == 'dev'", specifier = "==23.3.0" }, + { name = "cssmin", marker = "extra == 'dev'", specifier = "==0.2.0" }, + { name = "flake8", marker = "extra == 'dev'", specifier = "==3.9.2" }, + { name = "flake8-black", marker = "extra == 'dev'", specifier = "==0.3.6" }, + { name = "invoke", marker = "extra == 'dev'", specifier = "==2.2.0" }, + { name = "isort", marker = "extra == 'dev'", specifier = "==5.11.5" }, + { name = "libsass", marker = "extra == 'dev'" }, + { name = "markdown", marker = "extra == 'dev'", specifier = "==3.4.4" }, + { name = "markdown", marker = "extra == 'markdown'", specifier = "==3.4.4" }, + { name = "pelican", specifier = ">=4.5" }, + { name = "pytest", marker = "extra == 'dev'", specifier = "==6.2.5" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = "==3.0.0" }, + { name = "pytest-sugar", marker = "extra == 'dev'", specifier = "==0.9.7" }, +] + +[[package]] +name = "platformdirs" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.8'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/28/e40d24d2e2eb23135f8533ad33d582359c7825623b1e022f9d460def7c05/platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731", size = 19914 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/16/70be3b725073035aa5fc3229321d06e22e73e3e09f6af78dcfdf16c7636c/platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b", size = 17562 }, +] + +[[package]] +name = "pluggy" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/42/8f2833655a29c4e9cb52ee8a2be04ceac61bcff4a680fb338cbd3d1e322d/pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3", size = 61613 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/32/4a79112b8b87b21450b066e102d6608907f4c885ed7b04c3fdb085d4d6ae/pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849", size = 17695 }, +] + +[[package]] +name = "py" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708 }, +] + +[[package]] +name = "pycodestyle" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/02/b3/c832123f2699892c715fcdfebb1a8fdeffa11bb7b2350e46ecdd76b45a20/pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef", size = 103640 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/cc/227251b1471f129bc35e966bb0fceb005969023926d744139642d847b7ae/pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", size = 41725 }, +] + +[[package]] +name = "pyflakes" +version = "2.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/0f/0dc480da9162749bf629dca76570972dd9cce5bedc60196a3c912875c87d/pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db", size = 68567 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/11/2a745612f1d3cbbd9c69ba14b1b43a35a2f5c3c81cd0124508c52c64307f/pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", size = 68805 }, +] + +[[package]] +name = "pygments" +version = "2.17.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/55/59/8bccf4157baf25e4aa5a0bb7fa3ba8600907de105ebc22b0c78cfbf6f565/pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367", size = 4827772 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/9c/372fef8377a6e340b1704768d20daaded98bf13282b5327beb2e2fe2c7ef/pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c", size = 1179756 }, +] + +[[package]] +name = "pytest" +version = "6.2.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "atomicwrites", marker = "sys_platform == 'win32'" }, + { name = "attrs" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "py" }, + { name = "toml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/24/7d1f2d2537de114bdf1e6875115113ca80091520948d370c964b88070af2/pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89", size = 1118720 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/76/86f886e750b81a4357b6ed606b2bcf0ce6d6c27ad3c09ebf63ed674fc86e/pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134", size = 280654 }, +] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/41/e046526849972555928a6d31c2068410e47a31fb5ab0a77f868596811329/pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470", size = 61440 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/49/b3e0edec68d81846f519c602ac38af9db86e1e71275528b3e814ae236063/pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6", size = 20981 }, +] + +[[package]] +name = "pytest-sugar" +version = "0.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { 
name = "packaging" }, + { name = "pytest" }, + { name = "termcolor" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/18/fe569040c5796879288544b1cc98888fce1754138d54e8287ed21614491e/pytest-sugar-0.9.7.tar.gz", hash = "sha256:f1e74c1abfa55f7241cf7088032b6e378566f16b938f3f08905e2cf4494edd46", size = 14874 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/b2/8f5d346c86e690c58da3b21b7c14d656b4100606abed8e91a98e8b50f3bf/pytest_sugar-0.9.7-py2.py3-none-any.whl", hash = "sha256:8cb5a4e5f8bbcd834622b0235db9e50432f4cbd71fef55b467fe44e43701e062", size = 10034 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "rich" +version = "13.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, + { name = "typing-extensions", marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/76/40f084cb7db51c9d1fa29a7120717892aeda9a7711f6225692c957a93535/rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a", size = 222080 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/11/dadb85e2bd6b1f1ae56669c3e1f0410797f9605d752d68fb47b77f525b31/rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06", size = 241608 }, +] + +[[package]] +name = "six" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 }, +] + +[[package]] +name = "termcolor" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b8/85/147a0529b4e80b6b9d021ca8db3a820fcac53ec7374b87073d004aaf444c/termcolor-2.3.0.tar.gz", hash = 
"sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a", size = 12163 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/e1/434566ffce04448192369c1a282931cf4ae593e91907558eaecd2e9f2801/termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475", size = 6872 }, +] + +[[package]] +name = "toml" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, +] + +[[package]] +name = "tomli" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f", size = 15164 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757 }, +] + +[[package]] +name = "typed-ast" +version = "1.5.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/7e/a424029f350aa8078b75fd0d360a787a273ca753a678d1104c5fa4f3072a/typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd", size = 252841 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/07/5defe18d4fc16281cd18c4374270abc430c3d852d8ac29b5db6599d45cfe/typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b", size = 223267 }, + { url = "https://files.pythonhosted.org/packages/a0/5c/e379b00028680bfcd267d845cf46b60e76d8ac6f7009fd440d6ce030cc92/typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686", size = 208260 }, + { url = "https://files.pythonhosted.org/packages/3b/99/5cc31ef4f3c80e1ceb03ed2690c7085571e3fbf119cbd67a111ec0b6622f/typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769", size = 842272 }, + { url = "https://files.pythonhosted.org/packages/e2/ed/b9b8b794b37b55c9247b1e8d38b0361e8158795c181636d34d6c11b506e7/typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04", size = 824651 }, + { url = "https://files.pythonhosted.org/packages/ca/59/dbbbe5a0e91c15d14a0896b539a5ed01326b0d468e75c1a33274d128d2d1/typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d", size = 854960 }, + { url = "https://files.pythonhosted.org/packages/90/f0/0956d925f87bd81f6e0f8cf119eac5e5c8f4da50ca25bb9f5904148d4611/typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d", size = 839321 }, + { url = "https://files.pythonhosted.org/packages/43/17/4bdece9795da6f3345c4da5667ac64bc25863617f19c28d81f350f515be6/typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02", size = 139380 }, + { url = "https://files.pythonhosted.org/packages/75/53/b685e10da535c7b3572735f8bea0d4abb35a04722a7d44ca9c163a0cf822/typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee", size = 223264 }, + { url = "https://files.pythonhosted.org/packages/96/fd/fc8ccf19fc16a40a23e7c7802d0abc78c1f38f1abb6e2447c474f8a076d8/typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18", size = 208158 }, + { url = "https://files.pythonhosted.org/packages/bf/9a/598e47f2c3ecd19d7f1bb66854d0d3ba23ffd93c846448790a92524b0a8d/typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88", size = 878366 }, + { url = "https://files.pythonhosted.org/packages/60/ca/765e8bf8b24d0ed7b9fc669f6826c5bc3eb7412fc765691f59b83ae195b2/typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2", size = 860314 }, + { url = "https://files.pythonhosted.org/packages/d9/3c/4af750e6c673a0dd6c7b9f5b5e5ed58ec51a2e4e744081781c664d369dfa/typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9", size = 898108 }, + { url = "https://files.pythonhosted.org/packages/03/8d/d0a4d1e060e1e8dda2408131a0cc7633fc4bc99fca5941dcb86c461dfe01/typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8", size = 881971 }, + { url = "https://files.pythonhosted.org/packages/90/83/f28d2c912cd010a09b3677ac69d23181045eb17e358914ab739b7fdee530/typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b", size = 139286 }, + { url = "https://files.pythonhosted.org/packages/d5/00/635353c31b71ed307ab020eff6baed9987da59a1b2ba489f885ecbe293b8/typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e", size = 222315 }, + { url = "https://files.pythonhosted.org/packages/01/95/11be104446bb20212a741d30d40eab52a9cfc05ea34efa074ff4f7c16983/typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e", size = 793541 }, + { url = "https://files.pythonhosted.org/packages/32/f1/75bd58fb1410cb72fbc6e8adf163015720db2c38844b46a9149c5ff6bf38/typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311", size = 778348 }, + { url = "https://files.pythonhosted.org/packages/47/97/0bb4dba688a58ff9c08e63b39653e4bcaa340ce1bb9c1d58163e5c2c66f1/typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2", size = 809447 }, + { url = 
"https://files.pythonhosted.org/packages/a8/cd/9a867f5a96d83a9742c43914e10d3a2083d8fe894ab9bf60fd467c6c497f/typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4", size = 796707 }, + { url = "https://files.pythonhosted.org/packages/eb/06/73ca55ee5303b41d08920de775f02d2a3e1e59430371f5adf7fbb1a21127/typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431", size = 138403 }, + { url = "https://files.pythonhosted.org/packages/19/e3/88b65e46643006592f39e0fdef3e29454244a9fdaa52acfb047dc68cae6a/typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a", size = 222951 }, + { url = "https://files.pythonhosted.org/packages/15/e0/182bdd9edb6c6a1c068cecaa87f58924a817f2807a0b0d940f578b3328df/typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437", size = 208247 }, + { url = "https://files.pythonhosted.org/packages/8d/09/bba083f2c11746288eaf1859e512130420405033de84189375fe65d839ba/typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede", size = 861010 }, + { url = "https://files.pythonhosted.org/packages/31/f3/38839df509b04fb54205e388fc04b47627377e0ad628870112086864a441/typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4", size = 840026 }, + { url = "https://files.pythonhosted.org/packages/45/1e/aa5f1dae4b92bc665ae9a655787bb2fe007a881fa2866b0408ce548bb24c/typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6", size = 875615 }, + { url = "https://files.pythonhosted.org/packages/94/88/71a1c249c01fbbd66f9f28648f8249e737a7fe19056c1a78e7b3b9250eb1/typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4", size = 858320 }, + { url = "https://files.pythonhosted.org/packages/12/1e/19f53aad3984e351e6730e4265fde4b949a66c451e10828fdbc4dfb050f1/typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b", size = 139414 }, + { url = "https://files.pythonhosted.org/packages/b1/88/6e7f36f5fab6fbf0586a2dd866ac337924b7d4796a4d1b2b04443a864faf/typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10", size = 223329 }, + { url = "https://files.pythonhosted.org/packages/71/30/09d27e13824495547bcc665bd07afc593b22b9484f143b27565eae4ccaac/typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814", size = 208314 }, + { url = "https://files.pythonhosted.org/packages/07/3d/564308b7a432acb1f5399933cbb1b376a1a64d2544b90f6ba91894674260/typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8", size = 840900 }, + { url = "https://files.pythonhosted.org/packages/ea/f4/262512d14f777ea3666a089e2675a9b1500a85b8329a36de85d63433fb0e/typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274", size = 
823435 }, + { url = "https://files.pythonhosted.org/packages/a1/25/b3ccb948166d309ab75296ac9863ebe2ff209fbc063f1122a2d3979e47c3/typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a", size = 853125 }, + { url = "https://files.pythonhosted.org/packages/1c/09/012da182242f168bb5c42284297dcc08dc0a1b3668db5b3852aec467f56f/typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba", size = 837280 }, + { url = "https://files.pythonhosted.org/packages/30/bd/c815051404c4293265634d9d3e292f04fcf681d0502a9484c38b8f224d04/typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155", size = 139486 }, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/8b/0111dd7d6c1478bf83baa1cab85c686426c7a6274119aceb2bd9d35395ad/typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2", size = 72876 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/6b/63cc3df74987c36fe26157ee12e09e8f9db4de771e0f3404263117e75b95/typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36", size = 33232 }, +] + +[[package]] +name = "unidecode" +version = "1.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/89/19151076a006b9ac0dd37b1354e031f5297891ee507eb624755e58e10d3e/Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4", size = 192701 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/b7/6ec57841fb67c98f52fc8e4a2d96df60059637cba077edc569a302a8ffc7/Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39", size = 235494 }, +] + +[[package]] +name = "zipp" +version = "3.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/27/f0ac6b846684cecce1ee93d32450c45ab607f65c2e0255f0092032d91f07/zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", size = 18454 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/fa/c9e82bbe1af6266adf08afb563905eb87cab83fde00a0a08963510621047/zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556", size = 6758 }, +] From c576747aacdb163e7972c6824f87be8e9acbf17f Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:32:09 -0700 Subject: [PATCH 07/23] Use PEP 735 dependency groups --- pyproject.toml | 2 +- uv.lock | 32 +++++++++++++++++--------------- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8808d30..e010714 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ dependencies = [ "Funding" = "https://donate.getpelican.com/" "Issue Tracker" = "https://github.com/pelican-plugins/webassets/issues" -[project.optional-dependencies] +[dependency-groups] dev = [ "cssmin == 0.2.0", "libsass", diff --git a/uv.lock b/uv.lock index 017ca6d..777fdee 100644 --- a/uv.lock +++ b/uv.lock @@ -450,7 +450,7 @@ dependencies = [ { name = "pelican" }, ] -[package.optional-dependencies] +[package.dependency-groups] 
dev = [ { name = "black" }, { name = "cssmin" }, @@ -469,21 +469,23 @@ markdown = [ ] [package.metadata] -requires-dist = [ - { name = "black", marker = "extra == 'dev'", specifier = "==23.3.0" }, - { name = "cssmin", marker = "extra == 'dev'", specifier = "==0.2.0" }, - { name = "flake8", marker = "extra == 'dev'", specifier = "==3.9.2" }, - { name = "flake8-black", marker = "extra == 'dev'", specifier = "==0.3.6" }, - { name = "invoke", marker = "extra == 'dev'", specifier = "==2.2.0" }, - { name = "isort", marker = "extra == 'dev'", specifier = "==5.11.5" }, - { name = "libsass", marker = "extra == 'dev'" }, - { name = "markdown", marker = "extra == 'dev'", specifier = "==3.4.4" }, - { name = "markdown", marker = "extra == 'markdown'", specifier = "==3.4.4" }, - { name = "pelican", specifier = ">=4.5" }, - { name = "pytest", marker = "extra == 'dev'", specifier = "==6.2.5" }, - { name = "pytest-cov", marker = "extra == 'dev'", specifier = "==3.0.0" }, - { name = "pytest-sugar", marker = "extra == 'dev'", specifier = "==0.9.7" }, +requires-dist = [{ name = "pelican", specifier = ">=4.5" }] + +[package.metadata.dependency-groups] +dev = [ + { name = "black", specifier = "==23.3.0" }, + { name = "cssmin", specifier = "==0.2.0" }, + { name = "flake8", specifier = "==3.9.2" }, + { name = "flake8-black", specifier = "==0.3.6" }, + { name = "invoke", specifier = "==2.2.0" }, + { name = "isort", specifier = "==5.11.5" }, + { name = "libsass" }, + { name = "markdown", specifier = "==3.4.4" }, + { name = "pytest", specifier = "==6.2.5" }, + { name = "pytest-cov", specifier = "==3.0.0" }, + { name = "pytest-sugar", specifier = "==0.9.7" }, ] +markdown = [{ name = "markdown", specifier = "==3.4.4" }] [[package]] name = "platformdirs" From 44e0931547137aa168ec3aa2bb4f79f01b5dc972 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:36:12 -0700 Subject: [PATCH 08/23] Adjust imports for some of the deep tests --- tests/test_webassets.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/tests/test_webassets.py b/tests/test_webassets.py index 0e03298..5604011 100644 --- a/tests/test_webassets.py +++ b/tests/test_webassets.py @@ -63,7 +63,9 @@ def setUp(self): def test_jinja2_ext(self): # Test that the Jinja2 extension was correctly added. 
- from webassets.ext.jinja2 import AssetsExtension + from pelican.plugins.webassets.vendor.webassets.ext.jinja2 import ( + AssetsExtension, + ) self.assertIn(AssetsExtension, self.settings["JINJA_ENVIRONMENT"]["extensions"]) @@ -281,7 +283,7 @@ def test_webassets_bundles(self): self.assertIn(argument, test_bundle.contents) # ensure the libsass filter is used - from webassets.filter.libsass import LibSass + from pelican.plugins.webassets.vendor.webassets.filter.libsass import LibSass self.assertIn(LibSass(), test_bundle.filters) @@ -304,7 +306,7 @@ def test_asset_bundles(self): self.assertIn(argument, test_bundle.contents) # ensure the libsass filter is used - from webassets.filter.libsass import LibSass + from pelican.plugins.webassets.vendor.webassets.filter.libsass import LibSass self.assertIn(LibSass(), test_bundle.filters) @@ -386,9 +388,11 @@ def test_after_2024(self): """Ensure the next person must remove the deprecation warnings after 2024.""" from datetime import datetime + # I am kicking this can down the road a bit because I am _not_ mixing this + # in to the vendoring :D self.assertTrue( - datetime.now().year < 2024, - "After 2 years, in the year 2024, we should remove " + datetime.now().year < 2025, + "After 3 years, in the year 2025, we should remove " "support and deprecation warnings for the ASSET_* " "configuration settings", ) From 66be838d0dea1c06d7cb68ccda83161aa5cd5a5e Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:47:36 -0700 Subject: [PATCH 09/23] Adjust dependencies and tests so they run... theoretically This is now running on my mac, so I'm optimistic :D --- pyproject.toml | 8 +++++--- tests/test_webassets.py | 1 - uv.lock | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e010714..4673e64 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ requires = ["hatchling", "hatch-vcs", "hatch-fancy-pypi-readme>=22.8.0"] build-backend = "hatchling.build" [tool.hatch.build.targets.wheel] -packages = ["pelican/plugins/webassets"] +packages = ["pelican"] [project] name = "pelican-webassets" @@ -67,7 +67,7 @@ dependencies = [ [dependency-groups] dev = [ "cssmin == 0.2.0", - "libsass", + "libsass >=0.22.0", "black == 23.3.0", "flake8 == 3.9.2", "flake8-black == 0.3.6", @@ -76,7 +76,9 @@ dev = [ "markdown == 3.4.4", "pytest == 6.2.5", "pytest-cov == 3.0.0", - "pytest-sugar == 0.9.7",] + "pytest-sugar == 0.9.7", +] + markdown = [ "markdown == 3.4.4", ] diff --git a/tests/test_webassets.py b/tests/test_webassets.py index 5604011..7043090 100644 --- a/tests/test_webassets.py +++ b/tests/test_webassets.py @@ -18,7 +18,6 @@ LOGGER_NAME = "pelican.plugins.webassets" -@unittest.skipUnless(module_exists("webassets"), "webassets isn't installed") @unittest.skipUnless(module_exists("sass"), "libsass isn't installed") class TestWebAssets(unittest.TestCase): """Base class for testing webassets.""" diff --git a/uv.lock b/uv.lock index 777fdee..d4ca0d5 100644 --- a/uv.lock +++ b/uv.lock @@ -479,7 +479,7 @@ dev = [ { name = "flake8-black", specifier = "==0.3.6" }, { name = "invoke", specifier = "==2.2.0" }, { name = "isort", specifier = "==5.11.5" }, - { name = "libsass" }, + { name = "libsass", specifier = ">=0.22.0" }, { name = "markdown", specifier = "==3.4.4" }, { name = "pytest", specifier = "==6.2.5" }, { name = "pytest-cov", specifier = "==3.0.0" }, From 06970fe60f9fe491a8917523e687f0d24ab794a5 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 16:58:57 -0700 Subject: [PATCH 10/23] 
Exclude the vendor directory from linting (also lint with ruff :D ) --- pyproject.toml | 20 ++--- tasks.py | 39 +++------ uv.lock | 227 ++++++------------------------------------------- 3 files changed, 44 insertions(+), 242 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4673e64..9a4d1ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,17 +4,15 @@ git-username = "botpub" git-email = "52496925+botpub@users.noreply.github.com" append-github-contributor = true -[tool.isort] -# Maintain compatibility with Black -profile = "black" -multi_line_output = 3 +[tool.ruff] +exclude = ["pelican/plugins/webassets/vendor/"] -# Sort imports within their section independent of the import type -force_sort_within_sections = true +[tool.ruff.lint.isort] +section-order = ["future", "standard-library", "first-party", "pelican", "local-folder", "third-party"] # Designate "pelican" as separate import section -known_pelican = "pelican" -sections = "FUTURE,STDLIB,THIRDPARTY,PELICAN,FIRSTPARTY,LOCALFOLDER" +[tool.ruff.lint.isort.sections] +pelican = ["pelican"] [build-system] requires = ["hatchling", "hatch-vcs", "hatch-fancy-pypi-readme>=22.8.0"] @@ -68,17 +66,13 @@ dependencies = [ dev = [ "cssmin == 0.2.0", "libsass >=0.22.0", - "black == 23.3.0", - "flake8 == 3.9.2", - "flake8-black == 0.3.6", "invoke == 2.2.0", - "isort == 5.11.5", "markdown == 3.4.4", "pytest == 6.2.5", "pytest-cov == 3.0.0", "pytest-sugar == 0.9.7", + "ruff>=0.7.1", ] - markdown = [ "markdown == 3.4.4", ] diff --git a/tasks.py b/tasks.py index 7cc68a6..74edc4c 100644 --- a/tasks.py +++ b/tasks.py @@ -1,8 +1,8 @@ -from inspect import cleandoc import os +import sys +from inspect import cleandoc from pathlib import Path from shutil import which -import sys from invoke import task @@ -29,39 +29,20 @@ def tests(c): @task -def black(c, check=False, diff=False): - """Run Black auto-formatter, optionally with `--check` or `--diff`.""" - check_flag, diff_flag = "", "" - if check: - check_flag = "--check" - if diff: - diff_flag = "--diff" - c.run(f"{CMD_PREFIX}black {check_flag} {diff_flag} {PKG_PATH} tasks.py") - - -@task -def isort(c, check=False, diff=False): - """Ensure imports are sorted according to project standards.""" - check_flag, diff_flag = "", "" - if check: - check_flag = "-c" +def ruff(c, fix=False, diff=False): + """Run Ruff to ensure code meets project standards.""" + diff_flag, fix_flag = "", "" + if fix: + fix_flag = "--fix" if diff: diff_flag = "--diff" - c.run(f"{CMD_PREFIX}isort {check_flag} {diff_flag} .") - - -@task -def flake8(c): - """Check code for PEP8 compliance via Flake8.""" - c.run(f"{CMD_PREFIX}flake8 {PKG_PATH} tasks.py") + c.run(f"{CMD_PREFIX}/ruff check {diff_flag} {fix_flag} .", pty=PTY) @task -def lint(c, diff=False): +def lint(c, fix=False, diff=False): """Check code style via linting tools.""" - isort(c, check=True, diff=diff) - black(c, check=True, diff=diff) - flake8(c) + ruff(c, fix=fix, diff=diff) @task diff --git a/uv.lock b/uv.lock index d4ca0d5..dd6c75f 100644 --- a/uv.lock +++ b/uv.lock @@ -19,48 +19,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 }, ] -[[package]] -name = "black" -version = "23.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "mypy-extensions" }, - { name = "packaging" }, - { name = "pathspec" }, - { 
name = "platformdirs" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typed-ast", marker = "python_full_version < '3.8' and implementation_name == 'cpython'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d6/36/66370f5017b100225ec4950a60caeef60201a10080da57ddb24124453fba/black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940", size = 582156 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/f4/7908f71cc71da08df1317a3619f002cbf91927fb5d3ffc7723905a2113f7/black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915", size = 1342273 }, - { url = "https://files.pythonhosted.org/packages/27/70/07aab2623cfd3789786f17e051487a41d5657258c7b1ef8f780512ffea9c/black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9", size = 2676721 }, - { url = "https://files.pythonhosted.org/packages/29/b1/b584fc863c155653963039664a592b3327b002405043b7e761b9b0212337/black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2", size = 1520336 }, - { url = "https://files.pythonhosted.org/packages/6d/b4/0f13ab7f5e364795ff82b76b0f9a4c9c50afda6f1e2feeb8b03fdd7ec57d/black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c", size = 1654611 }, - { url = "https://files.pythonhosted.org/packages/de/b4/76f152c5eb0be5471c22cd18380d31d188930377a1a57969073b89d6615d/black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c", size = 1286657 }, - { url = "https://files.pythonhosted.org/packages/d7/6f/d3832960a3b646b333b7f0d80d336a3c123012e9d9d5dba4a622b2b6181d/black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6", size = 1326112 }, - { url = "https://files.pythonhosted.org/packages/eb/a5/17b40bfd9b607b69fa726b0b3a473d14b093dcd5191ea1a1dd664eccfee3/black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b", size = 2643808 }, - { url = "https://files.pythonhosted.org/packages/69/49/7e1f0cf585b0d607aad3f971f95982cc4208fc77f92363d632d23021ee57/black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d", size = 1503287 }, - { url = "https://files.pythonhosted.org/packages/c0/53/42e312c17cfda5c8fc4b6b396a508218807a3fcbb963b318e49d3ddd11d5/black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70", size = 1638625 }, - { url = "https://files.pythonhosted.org/packages/3f/0d/81dd4194ce7057c199d4f28e4c2a885082d9d929e7a55c514b23784f7787/black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326", size = 1293585 }, - { url = "https://files.pythonhosted.org/packages/24/eb/2d2d2c27cb64cfd073896f62a952a802cd83cf943a692a2f278525b57ca9/black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b", size = 1447428 }, - { url = 
"https://files.pythonhosted.org/packages/49/36/15d2122f90ff1cd70f06892ebda777b650218cf84b56b5916a993dc1359a/black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2", size = 1576467 }, - { url = "https://files.pythonhosted.org/packages/ca/44/eb41edd3f558a6139f09eee052dead4a7a464e563b822ddf236f5a8ee286/black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925", size = 1226437 }, - { url = "https://files.pythonhosted.org/packages/ce/f4/2b0c6ac9e1f8584296747f66dd511898b4ebd51d6510dba118279bff53b6/black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27", size = 1331955 }, - { url = "https://files.pythonhosted.org/packages/21/14/d5a2bec5fb15f9118baab7123d344646fac0b1c6939d51c2b05259cd2d9c/black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331", size = 2658520 }, - { url = "https://files.pythonhosted.org/packages/13/0a/ed8b66c299e896780e4528eed4018f5b084da3b9ba4ee48328550567d866/black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5", size = 1509852 }, - { url = "https://files.pythonhosted.org/packages/12/4b/99c71d1cf1353edd5aff2700b8960f92e9b805c9dab72639b67dbb449d3a/black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961", size = 1641852 }, - { url = "https://files.pythonhosted.org/packages/d1/6e/5810b6992ed70403124c67e8b3f62858a32b35405177553f1a78ed6b6e31/black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8", size = 1297694 }, - { url = "https://files.pythonhosted.org/packages/13/25/cfa06788d0a936f2445af88f13604b5bcd5c9d050db618c718e6ebe66f74/black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30", size = 1341089 }, - { url = "https://files.pythonhosted.org/packages/fd/5b/fc2d7922c1a6bb49458d424b5be71d251f2d0dc97be9534e35d171bdc653/black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3", size = 2674699 }, - { url = "https://files.pythonhosted.org/packages/49/d7/f3b7da6c772800f5375aeb050a3dcf682f0bbeb41d313c9c2820d0156e4e/black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266", size = 1519946 }, - { url = "https://files.pythonhosted.org/packages/3c/d7/85f3d79f9e543402de2244c4d117793f262149e404ea0168841613c33e07/black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab", size = 1654176 }, - { url = "https://files.pythonhosted.org/packages/06/1e/273d610249f0335afb1ddb03664a03223f4826e3d1a95170a0142cb19fb4/black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb", size = 1286299 }, - { url = "https://files.pythonhosted.org/packages/ad/e7/4642b7f462381799393fbad894ba4b32db00870a797f0616c197b07129a9/black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4", size = 180965 }, -] - [[package]] name = "blinker" version = "1.6.3" @@ -70,19 
+28,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bf/2b/11bcedb7dee4923253a4a21bae3be854bcc4f06295bd827756352016d97c/blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa", size = 13398 }, ] -[[package]] -name = "click" -version = "8.1.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, - { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, -] - [[package]] name = "colorama" version = "0.4.6" @@ -191,35 +136,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bd/a1/b7b1711d9bf43c3795366431633ab6ba6942744243aad809272ebfa59b39/feedgenerator-2.1.0-py3-none-any.whl", hash = "sha256:93b7ce1c5a86195cafd6a8e9baf6a2a863ebd6d9905e840ce5778f73efd9a8d5", size = 21796 }, ] -[[package]] -name = "flake8" -version = "3.9.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, - { name = "mccabe" }, - { name = "pycodestyle" }, - { name = "pyflakes" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9e/47/15b267dfe7e03dca4c4c06e7eadbd55ef4dfd368b13a0bab36d708b14366/flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b", size = 164777 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/80/35a0716e5d5101e643404dabd20f07f5528a21f3ef4032d31a49c913237b/flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907", size = 73147 }, -] - -[[package]] -name = "flake8-black" -version = "0.3.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "black" }, - { name = "flake8" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2f/52/dfb29b60cf14ae2d5b6119733b60fb32dc1ce0b35746f53b8dcc92d21f5c/flake8-black-0.3.6.tar.gz", hash = "sha256:0dfbca3274777792a5bcb2af887a4cad72c72d0e86c94e08e3a3de151bb41c34", size = 14565 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/89/0b4551179632da06da3770047fdfd927abe9e3c9f45182d216d5d177cfb3/flake8_black-0.3.6-py3-none-any.whl", hash = "sha256:fe8ea2eca98d8a504f22040d9117347f6b367458366952862ac3586e7d4eeaca", size = 9898 }, -] - [[package]] name = "importlib-metadata" version = "6.7.0" @@ -251,15 +167,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820", size = 160274 }, ] -[[package]] -name = "isort" -version = "5.11.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/67/63/18cc5c2f9084d3f91ce704f2b5c8e17bedd777244e7732c21a31992b0a78/isort-5.11.5.tar.gz", hash = 
"sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db", size = 187953 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/f6/c55db45970fbd14de6ab72082f1b8a143c3a69aa031c1e0dd4b9ecc8d496/isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746", size = 104094 }, -] - [[package]] name = "jinja2" version = "3.1.4" @@ -377,15 +284,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f6/f8/4da07de16f10551ca1f640c92b5f316f9394088b183c6a57183df6de5ae4/MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5", size = 17211 }, ] -[[package]] -name = "mccabe" -version = "0.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/06/18/fa675aa501e11d6d6ca0ae73a101b2f3571a565e0f7d38e062eec18a91ee/mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f", size = 8612 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/87/89/479dc97e18549e21354893e4ee4ef36db1d237534982482c3681ee6e7b57/mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", size = 8556 }, -] - [[package]] name = "mdurl" version = "0.1.2" @@ -395,15 +293,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, ] -[[package]] -name = "mypy-extensions" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, -] - [[package]] name = "packaging" version = "24.0" @@ -413,15 +302,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", size = 53488 }, ] -[[package]] -name = "pathspec" -version = "0.11.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a0/2a/bd167cdf116d4f3539caaa4c332752aac0b3a0cc0174cdb302ee68933e81/pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3", size = 47032 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/2a/9b1be29146139ef459188f5e420a66e835dda921208db600b7037093891f/pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20", size = 29603 }, -] - [[package]] name = "pelican" version = "4.8.0" @@ -452,17 +332,14 @@ dependencies = [ [package.dependency-groups] dev = [ - { name = "black" }, { name = "cssmin" }, - { name = "flake8" }, - { name = "flake8-black" }, { name = "invoke" }, - { name = "isort" }, { name = "libsass" }, { name = "markdown" }, { name = "pytest" }, { name = 
"pytest-cov" }, { name = "pytest-sugar" }, + { name = "ruff" }, ] markdown = [ { name = "markdown" }, @@ -473,32 +350,17 @@ requires-dist = [{ name = "pelican", specifier = ">=4.5" }] [package.metadata.dependency-groups] dev = [ - { name = "black", specifier = "==23.3.0" }, { name = "cssmin", specifier = "==0.2.0" }, - { name = "flake8", specifier = "==3.9.2" }, - { name = "flake8-black", specifier = "==0.3.6" }, { name = "invoke", specifier = "==2.2.0" }, - { name = "isort", specifier = "==5.11.5" }, { name = "libsass", specifier = ">=0.22.0" }, { name = "markdown", specifier = "==3.4.4" }, { name = "pytest", specifier = "==6.2.5" }, { name = "pytest-cov", specifier = "==3.0.0" }, { name = "pytest-sugar", specifier = "==0.9.7" }, + { name = "ruff", specifier = ">=0.7.1" }, ] markdown = [{ name = "markdown", specifier = "==3.4.4" }] -[[package]] -name = "platformdirs" -version = "4.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.8'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/31/28/e40d24d2e2eb23135f8533ad33d582359c7825623b1e022f9d460def7c05/platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731", size = 19914 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/16/70be3b725073035aa5fc3229321d06e22e73e3e09f6af78dcfdf16c7636c/platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b", size = 17562 }, -] - [[package]] name = "pluggy" version = "1.2.0" @@ -520,24 +382,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708 }, ] -[[package]] -name = "pycodestyle" -version = "2.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/b3/c832123f2699892c715fcdfebb1a8fdeffa11bb7b2350e46ecdd76b45a20/pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef", size = 103640 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/de/cc/227251b1471f129bc35e966bb0fceb005969023926d744139642d847b7ae/pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", size = 41725 }, -] - -[[package]] -name = "pyflakes" -version = "2.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a8/0f/0dc480da9162749bf629dca76570972dd9cce5bedc60196a3c912875c87d/pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db", size = 68567 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/11/2a745612f1d3cbbd9c69ba14b1b43a35a2f5c3c81cd0124508c52c64307f/pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", size = 68805 }, -] - [[package]] name = "pygments" version = "2.17.2" @@ -629,6 +473,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/11/dadb85e2bd6b1f1ae56669c3e1f0410797f9605d752d68fb47b77f525b31/rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06", size = 241608 }, ] +[[package]] +name = "ruff" +version = "0.7.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/21/5c6e05e0fd3fbb41be4fb92edbc9a04de70baf60adb61435ce0c6b8c3d55/ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4", size = 3181670 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/45/8a20a9920175c9c4892b2420f80ff3cf14949cf3067118e212f9acd9c908/ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89", size = 10389268 }, + { url = "https://files.pythonhosted.org/packages/1b/d3/2f8382db2cf4f9488e938602e33e36287f9d26cb283aa31f11c31297ce79/ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35", size = 10188348 }, + { url = "https://files.pythonhosted.org/packages/a2/31/7d14e2a88da351200f844b7be889a0845d9e797162cf76b136d21b832a23/ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99", size = 9841448 }, + { url = "https://files.pythonhosted.org/packages/db/99/738cafdc768eceeca0bd26c6f03e213aa91203d2278e1d95b1c31c4ece41/ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca", size = 10674864 }, + { url = "https://files.pythonhosted.org/packages/fe/12/bcf2836b50eab53c65008383e7d55201e490d75167c474f14a16e1af47d2/ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250", size = 10192105 }, + { url = "https://files.pythonhosted.org/packages/2b/71/261d5d668bf98b6c44e89bfb5dfa4cb8cb6c8b490a201a3d8030e136ea4f/ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c", size = 11194144 }, + { url = "https://files.pythonhosted.org/packages/90/1f/0926d18a3b566fa6e7b3b36093088e4ffef6b6ba4ea85a462d9a93f7e35c/ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565", size = 11917066 }, + { url = "https://files.pythonhosted.org/packages/cd/a8/9fac41f128b6a44ab4409c1493430b4ee4b11521e8aeeca19bfe1ce851f9/ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7", size = 11458821 }, + { url = "https://files.pythonhosted.org/packages/25/cd/59644168f086ab13fe4e02943b9489a0aa710171f66b178e179df5383554/ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a", size = 12700379 }, + { url = "https://files.pythonhosted.org/packages/fb/30/3bac63619eb97174661829c07fc46b2055a053dee72da29d7c304c1cd2c0/ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad", size = 11019813 }, + { url = "https://files.pythonhosted.org/packages/4b/af/f567b885b5cb3bcdbcca3458ebf210cc8c9c7a9f61c332d3c2a050c3b21e/ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112", size = 10662146 }, + { url = "https://files.pythonhosted.org/packages/bc/ad/eb930d3ad117a9f2f7261969c21559ebd82bb13b6e8001c7caed0d44be5f/ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378", size = 10256911 }, + { url = "https://files.pythonhosted.org/packages/20/d5/af292ce70a016fcec792105ca67f768b403dd480a11888bc1f418fed0dd5/ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8", size = 10767488 }, + { url = "https://files.pythonhosted.org/packages/24/85/cc04a3bd027f433bebd2a097e63b3167653c079f7f13d8f9a1178e693412/ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd", size = 11093368 }, + { url = "https://files.pythonhosted.org/packages/0b/fb/c39cbf32d1f3e318674b8622f989417231794926b573f76dd4d0ca49f0f1/ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9", size = 8594180 }, + { url = "https://files.pythonhosted.org/packages/5a/71/ec8cdea34ecb90c830ca60d54ac7b509a7b5eab50fae27e001d4470fe813/ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307", size = 9419751 }, + { url = "https://files.pythonhosted.org/packages/79/7b/884553415e9f0a9bf358ed52fb68b934e67ef6c5a62397ace924a1afdf9a/ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37", size = 8717402 }, +] + [[package]] name = "six" version = "1.16.0" @@ -665,48 +534,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757 }, ] -[[package]] -name = "typed-ast" -version = "1.5.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/7e/a424029f350aa8078b75fd0d360a787a273ca753a678d1104c5fa4f3072a/typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd", size = 252841 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/07/5defe18d4fc16281cd18c4374270abc430c3d852d8ac29b5db6599d45cfe/typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b", size = 223267 }, - { url = "https://files.pythonhosted.org/packages/a0/5c/e379b00028680bfcd267d845cf46b60e76d8ac6f7009fd440d6ce030cc92/typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686", size = 208260 }, - { url = "https://files.pythonhosted.org/packages/3b/99/5cc31ef4f3c80e1ceb03ed2690c7085571e3fbf119cbd67a111ec0b6622f/typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769", size = 842272 }, - { url = "https://files.pythonhosted.org/packages/e2/ed/b9b8b794b37b55c9247b1e8d38b0361e8158795c181636d34d6c11b506e7/typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04", size = 824651 }, - { url = "https://files.pythonhosted.org/packages/ca/59/dbbbe5a0e91c15d14a0896b539a5ed01326b0d468e75c1a33274d128d2d1/typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d", size = 854960 }, - { url = 
"https://files.pythonhosted.org/packages/90/f0/0956d925f87bd81f6e0f8cf119eac5e5c8f4da50ca25bb9f5904148d4611/typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d", size = 839321 }, - { url = "https://files.pythonhosted.org/packages/43/17/4bdece9795da6f3345c4da5667ac64bc25863617f19c28d81f350f515be6/typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02", size = 139380 }, - { url = "https://files.pythonhosted.org/packages/75/53/b685e10da535c7b3572735f8bea0d4abb35a04722a7d44ca9c163a0cf822/typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee", size = 223264 }, - { url = "https://files.pythonhosted.org/packages/96/fd/fc8ccf19fc16a40a23e7c7802d0abc78c1f38f1abb6e2447c474f8a076d8/typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18", size = 208158 }, - { url = "https://files.pythonhosted.org/packages/bf/9a/598e47f2c3ecd19d7f1bb66854d0d3ba23ffd93c846448790a92524b0a8d/typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88", size = 878366 }, - { url = "https://files.pythonhosted.org/packages/60/ca/765e8bf8b24d0ed7b9fc669f6826c5bc3eb7412fc765691f59b83ae195b2/typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2", size = 860314 }, - { url = "https://files.pythonhosted.org/packages/d9/3c/4af750e6c673a0dd6c7b9f5b5e5ed58ec51a2e4e744081781c664d369dfa/typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9", size = 898108 }, - { url = "https://files.pythonhosted.org/packages/03/8d/d0a4d1e060e1e8dda2408131a0cc7633fc4bc99fca5941dcb86c461dfe01/typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8", size = 881971 }, - { url = "https://files.pythonhosted.org/packages/90/83/f28d2c912cd010a09b3677ac69d23181045eb17e358914ab739b7fdee530/typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b", size = 139286 }, - { url = "https://files.pythonhosted.org/packages/d5/00/635353c31b71ed307ab020eff6baed9987da59a1b2ba489f885ecbe293b8/typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e", size = 222315 }, - { url = "https://files.pythonhosted.org/packages/01/95/11be104446bb20212a741d30d40eab52a9cfc05ea34efa074ff4f7c16983/typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e", size = 793541 }, - { url = "https://files.pythonhosted.org/packages/32/f1/75bd58fb1410cb72fbc6e8adf163015720db2c38844b46a9149c5ff6bf38/typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311", size = 778348 }, - { url = "https://files.pythonhosted.org/packages/47/97/0bb4dba688a58ff9c08e63b39653e4bcaa340ce1bb9c1d58163e5c2c66f1/typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2", size = 809447 }, - { url = "https://files.pythonhosted.org/packages/a8/cd/9a867f5a96d83a9742c43914e10d3a2083d8fe894ab9bf60fd467c6c497f/typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4", size = 796707 }, - { url = "https://files.pythonhosted.org/packages/eb/06/73ca55ee5303b41d08920de775f02d2a3e1e59430371f5adf7fbb1a21127/typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431", size = 138403 }, - { url = "https://files.pythonhosted.org/packages/19/e3/88b65e46643006592f39e0fdef3e29454244a9fdaa52acfb047dc68cae6a/typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a", size = 222951 }, - { url = "https://files.pythonhosted.org/packages/15/e0/182bdd9edb6c6a1c068cecaa87f58924a817f2807a0b0d940f578b3328df/typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437", size = 208247 }, - { url = "https://files.pythonhosted.org/packages/8d/09/bba083f2c11746288eaf1859e512130420405033de84189375fe65d839ba/typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede", size = 861010 }, - { url = "https://files.pythonhosted.org/packages/31/f3/38839df509b04fb54205e388fc04b47627377e0ad628870112086864a441/typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4", size = 840026 }, - { url = "https://files.pythonhosted.org/packages/45/1e/aa5f1dae4b92bc665ae9a655787bb2fe007a881fa2866b0408ce548bb24c/typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6", size = 875615 }, - { url = "https://files.pythonhosted.org/packages/94/88/71a1c249c01fbbd66f9f28648f8249e737a7fe19056c1a78e7b3b9250eb1/typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4", size = 858320 }, - { url = "https://files.pythonhosted.org/packages/12/1e/19f53aad3984e351e6730e4265fde4b949a66c451e10828fdbc4dfb050f1/typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b", size = 139414 }, - { url = "https://files.pythonhosted.org/packages/b1/88/6e7f36f5fab6fbf0586a2dd866ac337924b7d4796a4d1b2b04443a864faf/typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10", size = 223329 }, - { url = "https://files.pythonhosted.org/packages/71/30/09d27e13824495547bcc665bd07afc593b22b9484f143b27565eae4ccaac/typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814", size = 208314 }, - { url = "https://files.pythonhosted.org/packages/07/3d/564308b7a432acb1f5399933cbb1b376a1a64d2544b90f6ba91894674260/typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8", size = 840900 }, - { url = 
"https://files.pythonhosted.org/packages/ea/f4/262512d14f777ea3666a089e2675a9b1500a85b8329a36de85d63433fb0e/typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274", size = 823435 }, - { url = "https://files.pythonhosted.org/packages/a1/25/b3ccb948166d309ab75296ac9863ebe2ff209fbc063f1122a2d3979e47c3/typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a", size = 853125 }, - { url = "https://files.pythonhosted.org/packages/1c/09/012da182242f168bb5c42284297dcc08dc0a1b3668db5b3852aec467f56f/typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba", size = 837280 }, - { url = "https://files.pythonhosted.org/packages/30/bd/c815051404c4293265634d9d3e292f04fcf681d0502a9484c38b8f224d04/typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155", size = 139486 }, -] - [[package]] name = "typing-extensions" version = "4.7.1" From d6735e21626febdd8fef3ff1def1543c1256a6bb Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Thu, 31 Oct 2024 17:01:34 -0700 Subject: [PATCH 11/23] security: don't persist git credentials in checkout --- .github/workflows/main.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 90b35b0..5a3f277 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -15,7 +15,8 @@ jobs: steps: - uses: actions/checkout@v3 - + with: + persist-credentials: false - name: Install uv uses: astral-sh/setup-uv@v3 @@ -34,6 +35,8 @@ jobs: steps: - uses: actions/checkout@v3 + with: + persist-credentials: false - name: Validate links in Markdown files uses: JustinBeckwith/linkinator-action@v1 @@ -67,6 +70,7 @@ jobs: - uses: actions/checkout@v3 with: token: ${{ secrets.GH_TOKEN }} + persist-credentials: false - name: Install uv uses: astral-sh/setup-uv@v3 From 8035587fd25091ffd83bae6fe923ed139b3f88a8 Mon Sep 17 00:00:00 2001 From: Justin Mayer Date: Fri, 1 Nov 2024 10:32:39 +0100 Subject: [PATCH 12/23] Use Ruff in place of other linting tools --- .gitignore | 2 +- .pre-commit-config.yaml | 21 ++++++--------------- tox.ini | 3 --- 3 files changed, 7 insertions(+), 19 deletions(-) delete mode 100644 tox.ini diff --git a/.gitignore b/.gitignore index c04bc49..07df930 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1 @@ -poetry.lock +uv.lock diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 61cc08d..5d3bcbc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,7 +5,7 @@ ci: # See https://pre-commit.com/hooks.html for info on hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: check-added-large-files - id: check-ast @@ -20,18 +20,9 @@ repos: - id: forbid-new-submodules - id: trailing-whitespace - - repo: https://github.com/psf/black - rev: 23.3.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.7.1 hooks: - - id: black - - - repo: https://github.com/PyCQA/flake8 - rev: 3.9.2 - hooks: - - id: flake8 - args: [--max-line-length=88] - - - repo: https://github.com/PyCQA/isort - rev: 5.11.5 - hooks: - - id: isort + - id: ruff + - id: ruff-format + args: ["--check"] diff --git a/tox.ini b/tox.ini deleted file mode 100644 index abbd0dc..0000000 --- a/tox.ini +++ /dev/null @@ -1,3 +0,0 @@ -[flake8] 
-max-line-length = 88 -ignore = E203, W503 From 187117c7398f4036460e417ee473312a2e9dfc3d Mon Sep 17 00:00:00 2001 From: Justin Mayer Date: Fri, 1 Nov 2024 10:34:05 +0100 Subject: [PATCH 13/23] Add download badge to README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 3f9807d..7daf4f5 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ [![Build Status](https://img.shields.io/github/actions/workflow/status/pelican-plugins/webassets/main.yml?branch=main)](https://github.com/pelican-plugins/webassets/actions) [![PyPI Version](https://img.shields.io/pypi/v/pelican-webassets)](https://pypi.org/project/pelican-webassets/) +[![Downloads](https://img.shields.io/pypi/dm/pelican-webassets)](https://pypi.org/project/pelican-webassets/) ![License](https://img.shields.io/pypi/l/pelican-webassets?color=blue) This [Pelican](https://github.com/getpelican/pelican) plugin allows you to use From 1e18537fb496b156c51fd54fcf4f94311b530048 Mon Sep 17 00:00:00 2001 From: Justin Mayer Date: Fri, 1 Nov 2024 10:34:24 +0100 Subject: [PATCH 14/23] Update pytest-sugar dev dependency version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9a4d1ca..7d1c5b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ dev = [ "markdown == 3.4.4", "pytest == 6.2.5", "pytest-cov == 3.0.0", - "pytest-sugar == 0.9.7", + "pytest-sugar == 1.0.0", "ruff>=0.7.1", ] markdown = [ From 86945da73ca18b1e29825473a43c597f28fd52d7 Mon Sep 17 00:00:00 2001 From: Justin Mayer Date: Fri, 1 Nov 2024 10:34:49 +0100 Subject: [PATCH 15/23] Improve GitHub CI workflow Bring GitHub CI workflow in line with Cookiecutter plugin template. --- .github/workflows/main.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5a3f277..d435ee6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -5,16 +5,19 @@ on: [push, pull_request] env: PYTEST_ADDOPTS: "--color=yes" +permissions: + contents: read + jobs: test: - name: Test - ${{ matrix.python-version }} + name: Test - Python ${{ matrix.python-version }} runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false - name: Install uv @@ -34,7 +37,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false @@ -48,12 +51,12 @@ jobs: - name: Install dependencies env: - UV_PYTHON: "python3.9" + UV_PYTHON: "python3.10" run: | uv sync --frozen --no-sources - name: Run linters - run: uv run invoke lint --diff + run: pdm run invoke lint --diff deploy: name: Deploy @@ -67,10 +70,7 @@ jobs: id-token: write steps: - - uses: actions/checkout@v3 - with: - token: ${{ secrets.GH_TOKEN }} - persist-credentials: false + - uses: actions/checkout@v4 - name: Install uv uses: astral-sh/setup-uv@v3 @@ -83,7 +83,7 @@ jobs: - name: Publish if: ${{ steps.check_release.outputs.autopub_release=='true' }} env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | uvx --with 'autopub[github]' autopub prepare uvx --with 'autopub[github]' autopub commit From aa16c384487b79cfdf4f356aae5a653195d102bf Mon Sep 17 00:00:00 2001 From: Justin Mayer Date: Fri, 1 Nov 2024 10:39:56 +0100 Subject: [PATCH 
16/23] Fix incorrect tool change --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d435ee6..f98fd55 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -56,7 +56,7 @@ jobs: uv sync --frozen --no-sources - name: Run linters - run: pdm run invoke lint --diff + run: uv run invoke lint --diff deploy: name: Deploy From 5151b43a06bde58fb933ca880dcedc4396cd8491 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Sat, 2 Nov 2024 07:42:08 -0700 Subject: [PATCH 17/23] Refresh tasks.py to work with uv - drop poetry from the tools - special-case installing uv - replace poetry instructions with uv instructions --- tasks.py | 59 +++++++++++++++++++++++++++++++++++++------------------- uv.lock | 8 ++++---- 2 files changed, 43 insertions(+), 24 deletions(-) diff --git a/tasks.py b/tasks.py index 74edc4c..2a543a1 100644 --- a/tasks.py +++ b/tasks.py @@ -8,7 +8,7 @@ PKG_NAME = "webassets" PKG_PATH = Path(f"pelican/plugins/{PKG_NAME}") -TOOLS = ("poetry", "pre-commit") +TOOLS = ["pre-commit"] ACTIVE_VENV = os.environ.get("VIRTUAL_ENV", None) VENV_HOME = Path(os.environ.get("WORKON_HOME", "~/.local/share/virtualenvs")) @@ -16,16 +16,13 @@ VENV = str(VENV_PATH.expanduser()) BIN_DIR = "bin" if os.name != "nt" else "Scripts" VENV_BIN = Path(VENV) / Path(BIN_DIR) -POETRY = which("poetry") if which("poetry") else (VENV_BIN / "poetry") -CMD_PREFIX = f"{VENV_BIN}/" if ACTIVE_VENV else f"{POETRY} run " -PRECOMMIT = which("pre-commit") if which("pre-commit") else f"{CMD_PREFIX}pre-commit" PTY = True if os.name != "nt" else False @task def tests(c): """Run the test suite.""" - c.run(f"{CMD_PREFIX}pytest", pty=PTY) + c.run("uv run pytest", pty=PTY) @task @@ -36,7 +33,7 @@ def ruff(c, fix=False, diff=False): fix_flag = "--fix" if diff: diff_flag = "--diff" - c.run(f"{CMD_PREFIX}/ruff check {diff_flag} {fix_flag} .", pty=PTY) + c.run(f"uv run ruff check {diff_flag} {fix_flag} .", pty=PTY) @task @@ -46,38 +43,60 @@ def lint(c, fix=False, diff=False): @task +def uv(c): + """Install uv in the local virtual environment.""" + if not which("uv"): + print("** Installing uv in the project virual environment.") + c.run(f"{VENV_BIN}/python -m pip install uv", pty=PTY) + + +@task(pre=[uv]) def tools(c): """Install development tools in the virtual environment if not already on PATH.""" for tool in TOOLS: if not which(tool): print(f"** Installing {tool}.") - c.run(f"{CMD_PREFIX}pip install {tool}") + c.run(f"uv pip install {tool}") -@task +@task(pre=[tools]) def precommit(c): """Install pre-commit hooks to `.git/hooks/pre-commit`.""" print("** Installing pre-commit hooks.") - c.run(f"{PRECOMMIT} install") + pre_commit_cmd = ( + which("pre-commit") if which("pre-commit") else f"{VENV_BIN}pre-commit" + ) + c.run(f"{pre_commit_cmd} install") @task def setup(c): - """Set up the development environment.""" - if which("poetry") or ACTIVE_VENV: - tools(c) - c.run(f"{CMD_PREFIX}python -m pip install --upgrade pip") - c.run(f"{POETRY} install") - precommit(c) - print("\nDevelopment environment should now be set up and ready!\n") - else: + """Set up the development environment. You must have `uv` installed.""" + if not which("uv"): error_message = """ - Poetry is not installed, and there is no active virtual environment available. + uv is not installed, and there is no active virtual environment available. 
You can either manually create and activate a virtual environment, or you can - install Poetry via: + install uv by running the following command: - curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python - + curl -LsSf https://astral.sh/uv/install.sh | sh Once you have taken one of the above two steps, run `invoke setup` again. """ # noqa: E501 sys.exit(cleandoc(error_message)) + + global ACTIVE_VENV + if not ACTIVE_VENV: + print("** Creating a virtual environment.") + c.run("uv venv") + ACTIVE_VENV = ".venv" + + tools(c) + c.run("uv sync") + precommit(c) + success_message = """ + Development environment should now be set up and ready. + + To enable running invoke, either run it with `uv run inv` or + activate the virtual environment with `source .venv/bin/activate` + """ + print(cleandoc(success_message)) diff --git a/uv.lock b/uv.lock index dd6c75f..5bfcd27 100644 --- a/uv.lock +++ b/uv.lock @@ -356,7 +356,7 @@ dev = [ { name = "markdown", specifier = "==3.4.4" }, { name = "pytest", specifier = "==6.2.5" }, { name = "pytest-cov", specifier = "==3.0.0" }, - { name = "pytest-sugar", specifier = "==0.9.7" }, + { name = "pytest-sugar", specifier = "==1.0.0" }, { name = "ruff", specifier = ">=0.7.1" }, ] markdown = [{ name = "markdown", specifier = "==3.4.4" }] @@ -426,16 +426,16 @@ wheels = [ [[package]] name = "pytest-sugar" -version = "0.9.7" +version = "1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, { name = "pytest" }, { name = "termcolor" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/57/18/fe569040c5796879288544b1cc98888fce1754138d54e8287ed21614491e/pytest-sugar-0.9.7.tar.gz", hash = "sha256:f1e74c1abfa55f7241cf7088032b6e378566f16b938f3f08905e2cf4494edd46", size = 14874 } +sdist = { url = "https://files.pythonhosted.org/packages/f5/ac/5754f5edd6d508bc6493bc37d74b928f102a5fff82d9a80347e180998f08/pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a", size = 14992 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/b2/8f5d346c86e690c58da3b21b7c14d656b4100606abed8e91a98e8b50f3bf/pytest_sugar-0.9.7-py2.py3-none-any.whl", hash = "sha256:8cb5a4e5f8bbcd834622b0235db9e50432f4cbd71fef55b467fe44e43701e062", size = 10034 }, + { url = "https://files.pythonhosted.org/packages/92/fb/889f1b69da2f13691de09a111c16c4766a433382d44aa0ecf221deded44a/pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd", size = 10171 }, ] [[package]] From 7a5b1e3f4c56ebcc4328d185692a05c3201c5ac0 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Sat, 2 Nov 2024 17:10:53 -0700 Subject: [PATCH 18/23] Switch from uv to pdm for consistency with the template --- pdm.lock | 841 +++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 6 +- tasks.py | 104 +++--- 3 files changed, 898 insertions(+), 53 deletions(-) create mode 100644 pdm.lock diff --git a/pdm.lock b/pdm.lock new file mode 100644 index 0000000..8ef3a51 --- /dev/null +++ b/pdm.lock @@ -0,0 +1,841 @@ +# This file is @generated by PDM. +# It is not intended for manual editing. 
+ +[metadata] +groups = ["default", "dev", "markdown"] +strategy = ["cross_platform", "inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:11cb38534a01b288579e35f6f98cba06f0e1fdfc173cee4cda4008da95761089" + +[[metadata.targets]] +requires_python = "~=3.9" + +[[package]] +name = "anyio" +version = "4.6.2.post1" +requires_python = ">=3.9" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +groups = ["default"] +dependencies = [ + "exceptiongroup>=1.0.2; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions>=4.1; python_version < \"3.11\"", +] +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[[package]] +name = "atomicwrites" +version = "1.4.1" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +summary = "Atomic file writes." +groups = ["dev"] +marker = "sys_platform == \"win32\"" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "24.2.0" +requires_python = ">=3.7" +summary = "Classes Without Boilerplate" +groups = ["dev"] +dependencies = [ + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[[package]] +name = "blinker" +version = "1.8.2" +requires_python = ">=3.8" +summary = "Fast, simple object-to-object and broadcast signaling" +groups = ["default"] +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." 
+groups = ["dev"] +marker = "sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.6.4" +requires_python = ">=3.9" +summary = "Code coverage measurement for Python" +groups = ["dev"] +files = [ + {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, + {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, + {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, + {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, + {file = 
"coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, + {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, + {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, + {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, + {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, + {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, + {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, + {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, + {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, + {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, + {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, +] + +[[package]] +name = "coverage" +version = "7.6.4" +extras = ["toml"] +requires_python = ">=3.9" +summary = "Code coverage measurement for Python" +groups = ["dev"] +dependencies = [ + "coverage==7.6.4", + "tomli; python_full_version <= \"3.11.0a6\"", +] +files = [ + {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, + {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, + {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, + {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, + {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, + {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, + {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, + {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, + {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, + {file = 
"coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, + {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, + {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, + {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, + {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, + {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, + {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, +] + +[[package]] +name = "cssmin" +version = "0.2.0" +summary = "A Python port of the YUI CSS compression algorithm." 
+groups = ["dev"] +files = [ + {file = "cssmin-0.2.0.tar.gz", hash = "sha256:e012f0cc8401efcf2620332339011564738ae32be8c84b2e43ce8beaec1067b6"}, +] + +[[package]] +name = "docutils" +version = "0.21.2" +requires_python = ">=3.9" +summary = "Docutils -- Python Documentation Utilities" +groups = ["default"] +files = [ + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["default"] +marker = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[[package]] +name = "feedgenerator" +version = "2.1.0" +requires_python = ">=3.7" +summary = "Standalone version of django.utils.feedgenerator" +groups = ["default"] +dependencies = [ + "pytz>=0a", +] +files = [ + {file = "feedgenerator-2.1.0-py3-none-any.whl", hash = "sha256:93b7ce1c5a86195cafd6a8e9baf6a2a863ebd6d9905e840ce5778f73efd9a8d5"}, + {file = "feedgenerator-2.1.0.tar.gz", hash = "sha256:f075f23f28fd227f097c36b212161c6cf012e1c6caaf7ff53d5d6bb02cd42b9d"}, +] + +[[package]] +name = "idna" +version = "3.10" +requires_python = ">=3.6" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["default"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +requires_python = ">=3.8" +summary = "Read metadata from Python packages" +groups = ["dev", "markdown"] +marker = "python_version < \"3.10\"" +dependencies = [ + "typing-extensions>=3.6.4; python_version < \"3.8\"", + "zipp>=3.20", +] +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +requires_python = ">=3.7" +summary = "brain-dead simple config-ini parsing" +groups = ["dev"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "invoke" +version = "2.2.0" +requires_python = ">=3.6" +summary = "Pythonic task execution" +groups = ["dev"] +files = [ + {file = "invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820"}, + {file = "invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +requires_python = ">=3.7" +summary = "A very fast and expressive template engine." 
+groups = ["default"] +dependencies = [ + "MarkupSafe>=2.0", +] +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[[package]] +name = "libsass" +version = "0.23.0" +requires_python = ">=3.8" +summary = "Sass for Python: A straightforward binding of libsass for Python." +groups = ["dev"] +files = [ + {file = "libsass-0.23.0-cp38-abi3-macosx_11_0_x86_64.whl", hash = "sha256:34cae047cbbfc4ffa832a61cbb110f3c95f5471c6170c842d3fed161e40814dc"}, + {file = "libsass-0.23.0-cp38-abi3-macosx_14_0_arm64.whl", hash = "sha256:ea97d1b45cdc2fc3590cb9d7b60f1d8915d3ce17a98c1f2d4dd47ee0d9c68ce6"}, + {file = "libsass-0.23.0-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4a218406d605f325d234e4678bd57126a66a88841cb95bee2caeafdc6f138306"}, + {file = "libsass-0.23.0-cp38-abi3-win32.whl", hash = "sha256:31e86d92a5c7a551df844b72d83fc2b5e50abc6fbbb31e296f7bebd6489ed1b4"}, + {file = "libsass-0.23.0-cp38-abi3-win_amd64.whl", hash = "sha256:a2ec85d819f353cbe807432d7275d653710d12b08ec7ef61c124a580a8352f3c"}, + {file = "libsass-0.23.0.tar.gz", hash = "sha256:6f209955ede26684e76912caf329f4ccb57e4a043fd77fe0e7348dd9574f1880"}, +] + +[[package]] +name = "markdown" +version = "3.4.4" +requires_python = ">=3.7" +summary = "Python implementation of John Gruber's Markdown." +groups = ["dev", "markdown"] +dependencies = [ + "importlib-metadata>=4.4; python_version < \"3.10\"", +] +files = [ + {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, + {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +requires_python = ">=3.8" +summary = "Python port of markdown-it. Markdown parsing, done right!" +groups = ["default"] +dependencies = [ + "mdurl~=0.1", +] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +requires_python = ">=3.9" +summary = "Safely add untrusted strings to HTML/XML markup." 
+groups = ["default"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +requires_python = ">=3.7" +summary = "Markdown URL utilities" +groups = ["default"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "ordered-set" +version = "4.1.0" +requires_python = ">=3.7" +summary = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +groups = ["default"] +files = [ + {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, + {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, +] + +[[package]] +name = "packaging" +version = "24.1" +requires_python = ">=3.8" +summary = "Core utilities 
for Python packages" +groups = ["dev"] +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pelican" +version = "4.10.1" +requires_python = "<4.0,>=3.8.1" +summary = "Static site generator supporting Markdown and reStructuredText" +groups = ["default"] +dependencies = [ + "backports-zoneinfo>=0.2.1; python_version < \"3.9\"", + "blinker>=1.7.0", + "docutils>=0.20.1", + "feedgenerator>=2.1.0", + "jinja2>=3.1.2", + "ordered-set>=4.1.0", + "pygments>=2.16.1", + "python-dateutil>=2.8.2", + "rich>=13.6.0", + "tzdata; sys_platform == \"win32\"", + "unidecode>=1.3.7", + "watchfiles>=0.21.0", +] +files = [ + {file = "pelican-4.10.1-py3-none-any.whl", hash = "sha256:5fb59933fd9eaff83a5e4e093a9a6249ec59ae0d8e5da63710d965acd6f5d3f7"}, + {file = "pelican-4.10.1.tar.gz", hash = "sha256:dfcd468ddb823c70df52545ca413b44505ff2d33132284d2f5a31d217cde6cb8"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +requires_python = ">=3.8" +summary = "plugin and hook calling mechanisms for python" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[[package]] +name = "py" +version = "1.11.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +summary = "library with cross-python path, ini-parsing, io, code, log facilities" +groups = ["dev"] +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +requires_python = ">=3.8" +summary = "Pygments is a syntax highlighting package written in Python." +groups = ["default"] +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +requires_python = ">=3.6" +summary = "pytest: simple powerful testing with Python" +groups = ["dev"] +dependencies = [ + "atomicwrites>=1.0; sys_platform == \"win32\"", + "attrs>=19.2.0", + "colorama; sys_platform == \"win32\"", + "importlib-metadata>=0.12; python_version < \"3.8\"", + "iniconfig", + "packaging", + "pluggy<2.0,>=0.12", + "py>=1.8.2", + "toml", +] +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +requires_python = ">=3.6" +summary = "Pytest plugin for measuring coverage." 
+groups = ["dev"] +dependencies = [ + "coverage[toml]>=5.2.1", + "pytest>=4.6", +] +files = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] + +[[package]] +name = "pytest-sugar" +version = "1.0.0" +summary = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." +groups = ["dev"] +dependencies = [ + "packaging>=21.3", + "pytest>=6.2.0", + "termcolor>=2.1.0", +] +files = [ + {file = "pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a"}, + {file = "pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd"}, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +summary = "Extensions to the standard Python datetime module" +groups = ["default"] +dependencies = [ + "six>=1.5", +] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[[package]] +name = "pytz" +version = "2024.2" +summary = "World timezone definitions, modern and historical" +groups = ["default"] +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "rich" +version = "13.9.4" +requires_python = ">=3.8.0" +summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +groups = ["default"] +dependencies = [ + "markdown-it-py>=2.2.0", + "pygments<3.0.0,>=2.13.0", + "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", +] +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[[package]] +name = "ruff" +version = "0.7.2" +requires_python = ">=3.7" +summary = "An extremely fast Python linter and code formatter, written in Rust." 
+groups = ["dev"] +files = [ + {file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"}, + {file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"}, + {file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"}, + {file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"}, + {file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"}, + {file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"}, + {file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"}, +] + +[[package]] +name = "six" +version = "1.16.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +summary = "Python 2 and 3 compatibility utilities" +groups = ["default"] +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library your code is running under" +groups = ["default"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "termcolor" +version = 
"2.5.0" +requires_python = ">=3.9" +summary = "ANSI color formatting for output in terminal" +groups = ["dev"] +files = [ + {file = "termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8"}, + {file = "termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +requires_python = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +summary = "Python Library for Tom's Obvious, Minimal Language" +groups = ["dev"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.2" +requires_python = ">=3.8" +summary = "A lil' TOML parser" +groups = ["dev"] +marker = "python_full_version <= \"3.11.0a6\"" +files = [ + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["default"] +marker = "python_version < \"3.11\"" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.2" +requires_python = ">=2" +summary = "Provider of IANA time zone data" +groups = ["default"] +marker = "sys_platform == \"win32\"" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + +[[package]] +name = "unidecode" +version = "1.3.8" +requires_python = ">=3.5" +summary = "ASCII transliterations of Unicode text" +groups = ["default"] +files = [ + {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, + {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, +] + +[[package]] +name = "watchfiles" +version = "0.24.0" +requires_python = ">=3.8" +summary = "Simple, modern and high performance file watching and code reload in python." 
+groups = ["default"] +dependencies = [ + "anyio>=3.0.0", +] +files = [ + {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, + {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, + {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, + {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, + {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, + {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, + {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, + {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, + {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, + {file = 
"watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, + {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, + {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, + {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, + {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, + {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, +] + +[[package]] +name = "zipp" +version = "3.20.2" +requires_python = ">=3.8" +summary = "Backport of pathlib-compatible object wrapper for zip files" +groups = ["dev", "markdown"] +marker = "python_version < \"3.10\"" +files = [ + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, +] diff --git a/pyproject.toml b/pyproject.toml index 7d1c5b9..b471b8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ git-email = "52496925+botpub@users.noreply.github.com" append-github-contributor = true [tool.ruff] -exclude = ["pelican/plugins/webassets/vendor/"] +exclude = ["pelican/plugins/webassets/vendor/", ".venv"] [tool.ruff.lint.isort] section-order = ["future", "standard-library", "first-party", "pelican", "local-folder", "third-party"] @@ -14,6 +14,8 @@ section-order = ["future", "standard-library", "first-party", "pelican", "local- [tool.ruff.lint.isort.sections] pelican = ["pelican"] +[tool.pdm] + [build-system] requires = ["hatchling", "hatch-vcs", "hatch-fancy-pypi-readme>=22.8.0"] build-backend = "hatchling.build" @@ -36,7 +38,7 @@ documentation = "https://docs.getpelican.com" packages = [ { include = "pelican" }, ] -requires-python = "~= 3.7" +requires-python = "~= 3.9" classifiers = [ "Development Status :: 5 - Production/Stable", diff --git a/tasks.py b/tasks.py index 2a543a1..fb47893 100644 --- a/tasks.py +++ b/tasks.py @@ -1,14 +1,15 @@ +import logging import os -import sys from inspect import cleandoc from pathlib import Path from shutil import which from invoke import task +logger = logging.getLogger(__name__) + PKG_NAME = "webassets" PKG_PATH = Path(f"pelican/plugins/{PKG_NAME}") -TOOLS = ["pre-commit"] ACTIVE_VENV = os.environ.get("VIRTUAL_ENV", None) VENV_HOME = Path(os.environ.get("WORKON_HOME", "~/.local/share/virtualenvs")) @@ -16,87 +17,88 @@ VENV = str(VENV_PATH.expanduser()) BIN_DIR = "bin" if os.name != "nt" else "Scripts" VENV_BIN = Path(VENV) / Path(BIN_DIR) -PTY = True if os.name != "nt" else False + +TOOLS = ("cruft", "pdm", "pre-commit") +PDM = which("pdm") if which("pdm") else (VENV_BIN / "pdm") +CMD_PREFIX = f"{VENV_BIN}/" if ACTIVE_VENV else f"{PDM} run " +CRUFT = which("cruft") if which("cruft") else f"{CMD_PREFIX}cruft" +PRECOMMIT = which("pre-commit") if which("pre-commit") else f"{CMD_PREFIX}pre-commit" +PTY = os.name != "nt" + + +@task +def tests(c, deprecations=False): + """Run the test suite, optionally with `--deprecations`.""" + deprecations_flag = "" if deprecations else "-W ignore::DeprecationWarning" + c.run(f"{CMD_PREFIX}pytest {deprecations_flag}", pty=PTY) @task -def tests(c): - """Run the test suite.""" - c.run("uv run pytest", pty=PTY) +def format(c, 
check=False, diff=False): + """Run Ruff's auto-formatter, optionally with `--check` or `--diff`.""" + check_flag, diff_flag = "", "" + if check: + check_flag = "--check" + if diff: + diff_flag = "--diff" + c.run( + f"{CMD_PREFIX}ruff format {check_flag} {diff_flag} {PKG_PATH} tasks.py", pty=PTY + ) @task -def ruff(c, fix=False, diff=False): +def ruff(c, concise=False, fix=False, diff=False): """Run Ruff to ensure code meets project standards.""" - diff_flag, fix_flag = "", "" + concise_flag, fix_flag, diff_flag = "", "", "" + if concise: + concise_flag = "--output-format=concise" if fix: fix_flag = "--fix" if diff: diff_flag = "--diff" - c.run(f"uv run ruff check {diff_flag} {fix_flag} .", pty=PTY) + c.run(f"{CMD_PREFIX}ruff check {concise_flag} {diff_flag} {fix_flag} .", pty=PTY) @task -def lint(c, fix=False, diff=False): +def lint(c, concise=False, fix=False, diff=False): """Check code style via linting tools.""" - ruff(c, fix=fix, diff=diff) + ruff(c, concise=concise, fix=fix, diff=diff) + format(c, check=(not fix), diff=diff) @task -def uv(c): - """Install uv in the local virtual environment.""" - if not which("uv"): - print("** Installing uv in the project virual environment.") - c.run(f"{VENV_BIN}/python -m pip install uv", pty=PTY) - - -@task(pre=[uv]) def tools(c): """Install development tools in the virtual environment if not already on PATH.""" for tool in TOOLS: if not which(tool): - print(f"** Installing {tool}.") - c.run(f"uv pip install {tool}") + logger.info(f"** Installing {tool} **") + c.run(f"{CMD_PREFIX}pip install {tool}") -@task(pre=[tools]) +@task def precommit(c): - """Install pre-commit hooks to `.git/hooks/pre-commit`.""" - print("** Installing pre-commit hooks.") - pre_commit_cmd = ( - which("pre-commit") if which("pre-commit") else f"{VENV_BIN}pre-commit" - ) - c.run(f"{pre_commit_cmd} install") + """Install pre-commit hooks to .git/hooks/pre-commit.""" + logger.info("** Installing pre-commit hooks **") + c.run(f"{PRECOMMIT} install") @task def setup(c): - """Set up the development environment. You must have `uv` installed.""" - if not which("uv"): + """Set up the development environment.""" + if which("pdm") or ACTIVE_VENV: + tools(c) + c.run(f"{CMD_PREFIX}python -m pip install --upgrade pip", pty=PTY) + c.run(f"{PDM} update --dev", pty=PTY) + precommit(c) + logger.info("\nDevelopment environment should now be set up and ready!\n") + else: error_message = """ - uv is not installed, and there is no active virtual environment available. + PDM is not installed, and there is no active virtual environment available. You can either manually create and activate a virtual environment, or you can - install uv by running the following command: + install PDM via: - curl -LsSf https://astral.sh/uv/install.sh | sh + curl -sSL https://raw.githubusercontent.com/pdm-project/pdm/main/install-pdm.py | python3 - Once you have taken one of the above two steps, run `invoke setup` again. """ # noqa: E501 - sys.exit(cleandoc(error_message)) - - global ACTIVE_VENV - if not ACTIVE_VENV: - print("** Creating a virtual environment.") - c.run("uv venv") - ACTIVE_VENV = ".venv" - - tools(c) - c.run("uv sync") - precommit(c) - success_message = """ - Development environment should now be set up and ready. 
-
-    To enable running invoke, either run it with `uv run inv` or
-    activate the virtual environment with `source .venv/bin/activate`
-    """
-    print(cleandoc(success_message))
+        raise SystemExit(cleandoc(error_message))

From 9485816f29f2f7e56f72f10eb55e403095469505 Mon Sep 17 00:00:00 2001
From: Chris Rose
Date: Sat, 2 Nov 2024 17:22:41 -0700
Subject: [PATCH 19/23] Remove uv.lock

---
 uv.lock | 562 --------------------------------------------------------
 1 file changed, 562 deletions(-)
 delete mode 100644 uv.lock

diff --git a/uv.lock b/uv.lock
deleted file mode 100644
index 5bfcd27..0000000
--- a/uv.lock
+++ /dev/null
@@ -1,562 +0,0 @@
-version = 1
-requires-python = ">=3.7, <4"
-
-[[package]]
-name = "atomicwrites"
-version = "1.4.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/87/c6/53da25344e3e3a9c01095a89f16dbcda021c609ddb42dd6d7c0528236fb2/atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11", size = 14227 }
-
-[[package]]
-name = "attrs"
-version = "24.2.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "importlib-metadata", marker = "python_full_version < '3.8'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 },
-]
-
-[[package]]
-name = "blinker"
-version = "1.6.3"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/ea/96/ed1420a974540da7419094f2553bc198c454cee5f72576e7c7629dd12d6e/blinker-1.6.3.tar.gz", hash = "sha256:152090d27c1c5c722ee7e48504b02d76502811ce02e1523553b4cf8c8b3d3a8d", size = 28092 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/bf/2b/11bcedb7dee4923253a4a21bae3be854bcc4f06295bd827756352016d97c/blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa", size = 13398 },
-]
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
-]
-
-[[package]]
-name = "coverage"
-version = "7.2.7"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/45/8b/421f30467e69ac0e414214856798d4bc32da1336df745e49e49ae5c1e2a8/coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", size = 762575 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/01/24/be01e62a7bce89bcffe04729c540382caa5a06bee45ae42136c93e2499f5/coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash =
"sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8", size = 200724 }, - { url = "https://files.pythonhosted.org/packages/3d/80/7060a445e1d2c9744b683dc935248613355657809d6c6b2716cdf4ca4766/coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb", size = 201024 }, - { url = "https://files.pythonhosted.org/packages/b8/9d/926fce7e03dbfc653104c2d981c0fa71f0572a9ebd344d24c573bd6f7c4f/coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6", size = 229528 }, - { url = "https://files.pythonhosted.org/packages/d1/3a/67f5d18f911abf96857f6f7e4df37ca840e38179e2cc9ab6c0b9c3380f19/coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2", size = 227842 }, - { url = "https://files.pythonhosted.org/packages/b4/bd/1b2331e3a04f4cc9b7b332b1dd0f3a1261dfc4114f8479bebfcc2afee9e8/coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063", size = 228717 }, - { url = "https://files.pythonhosted.org/packages/2b/86/3dbf9be43f8bf6a5ca28790a713e18902b2d884bc5fa9512823a81dff601/coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1", size = 234632 }, - { url = "https://files.pythonhosted.org/packages/91/e8/469ed808a782b9e8305a08bad8c6fa5f8e73e093bda6546c5aec68275bff/coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353", size = 232875 }, - { url = "https://files.pythonhosted.org/packages/29/8f/4fad1c2ba98104425009efd7eaa19af9a7c797e92d40cd2ec026fa1f58cb/coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495", size = 234094 }, - { url = "https://files.pythonhosted.org/packages/94/4e/d4e46a214ae857be3d7dc5de248ba43765f60daeb1ab077cb6c1536c7fba/coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818", size = 203184 }, - { url = "https://files.pythonhosted.org/packages/1f/e9/d6730247d8dec2a3dddc520ebe11e2e860f0f98cee3639e23de6cf920255/coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850", size = 204096 }, - { url = "https://files.pythonhosted.org/packages/c6/fa/529f55c9a1029c840bcc9109d5a15ff00478b7ff550a1ae361f8745f8ad5/coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", size = 200895 }, - { url = "https://files.pythonhosted.org/packages/67/d7/cd8fe689b5743fffac516597a1222834c42b80686b99f5b44ef43ccc2a43/coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", size = 201120 }, - { url = "https://files.pythonhosted.org/packages/8c/95/16eed713202406ca0a37f8ac259bbf144c9d24f9b8097a8e6ead61da2dbb/coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3", size = 233178 }, - { url = 
"https://files.pythonhosted.org/packages/c1/49/4d487e2ad5d54ed82ac1101e467e8994c09d6123c91b2a962145f3d262c2/coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", size = 230754 }, - { url = "https://files.pythonhosted.org/packages/a7/cd/3ce94ad9d407a052dc2a74fbeb1c7947f442155b28264eb467ee78dea812/coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", size = 232558 }, - { url = "https://files.pythonhosted.org/packages/8f/a8/12cc7b261f3082cc299ab61f677f7e48d93e35ca5c3c2f7241ed5525ccea/coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", size = 241509 }, - { url = "https://files.pythonhosted.org/packages/04/fa/43b55101f75a5e9115259e8be70ff9279921cb6b17f04c34a5702ff9b1f7/coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", size = 239924 }, - { url = "https://files.pythonhosted.org/packages/68/5f/d2bd0f02aa3c3e0311986e625ccf97fdc511b52f4f1a063e4f37b624772f/coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", size = 240977 }, - { url = "https://files.pythonhosted.org/packages/ba/92/69c0722882643df4257ecc5437b83f4c17ba9e67f15dc6b77bad89b6982e/coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", size = 203168 }, - { url = "https://files.pythonhosted.org/packages/b1/96/c12ed0dfd4ec587f3739f53eb677b9007853fd486ccb0e7d5512a27bab2e/coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", size = 204185 }, - { url = "https://files.pythonhosted.org/packages/ff/d5/52fa1891d1802ab2e1b346d37d349cb41cdd4fd03f724ebbf94e80577687/coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", size = 201020 }, - { url = "https://files.pythonhosted.org/packages/24/df/6765898d54ea20e3197a26d26bb65b084deefadd77ce7de946b9c96dfdc5/coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", size = 233994 }, - { url = "https://files.pythonhosted.org/packages/15/81/b108a60bc758b448c151e5abceed027ed77a9523ecbc6b8a390938301841/coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", size = 231358 }, - { url = "https://files.pythonhosted.org/packages/61/90/c76b9462f39897ebd8714faf21bc985b65c4e1ea6dff428ea9dc711ed0dd/coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", size = 233316 }, - { url = "https://files.pythonhosted.org/packages/04/d6/8cba3bf346e8b1a4fb3f084df7d8cea25a6b6c56aaca1f2e53829be17e9e/coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", size = 240159 }, - { url = 
"https://files.pythonhosted.org/packages/6e/ea/4a252dc77ca0605b23d477729d139915e753ee89e4c9507630e12ad64a80/coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", size = 238127 }, - { url = "https://files.pythonhosted.org/packages/9f/5c/d9760ac497c41f9c4841f5972d0edf05d50cad7814e86ee7d133ec4a0ac8/coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", size = 239833 }, - { url = "https://files.pythonhosted.org/packages/69/8c/26a95b08059db1cbb01e4b0e6d40f2e9debb628c6ca86b78f625ceaf9bab/coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", size = 203463 }, - { url = "https://files.pythonhosted.org/packages/b7/00/14b00a0748e9eda26e97be07a63cc911108844004687321ddcc213be956c/coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", size = 204347 }, - { url = "https://files.pythonhosted.org/packages/80/d7/67937c80b8fd4c909fdac29292bc8b35d9505312cff6bcab41c53c5b1df6/coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f", size = 200580 }, - { url = "https://files.pythonhosted.org/packages/7a/05/084864fa4bbf8106f44fb72a56e67e0cd372d3bf9d893be818338c81af5d/coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb", size = 226237 }, - { url = "https://files.pythonhosted.org/packages/67/a2/6fa66a50e6e894286d79a3564f42bd54a9bd27049dc0a63b26d9924f0aa3/coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9", size = 224256 }, - { url = "https://files.pythonhosted.org/packages/e2/c0/73f139794c742840b9ab88e2e17fe14a3d4668a166ff95d812ac66c0829d/coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd", size = 225550 }, - { url = "https://files.pythonhosted.org/packages/03/ec/6f30b4e0c96ce03b0e64aec46b4af2a8c49b70d1b5d0d69577add757b946/coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a", size = 232440 }, - { url = "https://files.pythonhosted.org/packages/22/c1/2f6c1b6f01a0996c9e067a9c780e1824351dbe17faae54388a4477e6d86f/coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959", size = 230897 }, - { url = "https://files.pythonhosted.org/packages/8d/d6/53e999ec1bf7498ca4bc5f3b8227eb61db39068d2de5dcc359dec5601b5a/coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02", size = 232024 }, - { url = "https://files.pythonhosted.org/packages/e9/40/383305500d24122dbed73e505a4d6828f8f3356d1f68ab6d32c781754b81/coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f", size = 203293 }, - { url = "https://files.pythonhosted.org/packages/0e/bc/7e3a31534fabb043269f14fb64e2bb2733f85d4cf39e5bbc71357c57553a/coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = 
"sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0", size = 204040 }, - { url = "https://files.pythonhosted.org/packages/c6/fc/be19131010930a6cf271da48202c8cc1d3f971f68c02fb2d3a78247f43dc/coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5", size = 200689 }, - { url = "https://files.pythonhosted.org/packages/28/d7/9a8de57d87f4bbc6f9a6a5ded1eaac88a89bf71369bb935dac3c0cf2893e/coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5", size = 200986 }, - { url = "https://files.pythonhosted.org/packages/c8/e4/e6182e4697665fb594a7f4e4f27cb3a4dd00c2e3d35c5c706765de8c7866/coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9", size = 230648 }, - { url = "https://files.pythonhosted.org/packages/7b/e3/f552d5871943f747165b92a924055c5d6daa164ae659a13f9018e22f3990/coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6", size = 228511 }, - { url = "https://files.pythonhosted.org/packages/44/55/49f65ccdd4dfd6d5528e966b28c37caec64170c725af32ab312889d2f857/coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e", size = 229852 }, - { url = "https://files.pythonhosted.org/packages/0d/31/340428c238eb506feb96d4fb5c9ea614db1149517f22cc7ab8c6035ef6d9/coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050", size = 235578 }, - { url = "https://files.pythonhosted.org/packages/dd/ce/97c1dd6592c908425622fe7f31c017d11cf0421729b09101d4de75bcadc8/coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5", size = 234079 }, - { url = "https://files.pythonhosted.org/packages/de/a3/5a98dc9e239d0dc5f243ef5053d5b1bdcaa1dee27a691dfc12befeccf878/coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f", size = 234991 }, - { url = "https://files.pythonhosted.org/packages/4a/fb/78986d3022e5ccf2d4370bc43a5fef8374f092b3c21d32499dee8e30b7b6/coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e", size = 203160 }, - { url = "https://files.pythonhosted.org/packages/c3/1c/6b3c9c363fb1433c79128e0d692863deb761b1b78162494abb9e5c328bc0/coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c", size = 204085 }, - { url = "https://files.pythonhosted.org/packages/88/da/495944ebf0ad246235a6bd523810d9f81981f9b81c6059ba1f56e943abe0/coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9", size = 200725 }, - { url = "https://files.pythonhosted.org/packages/ca/0c/3dfeeb1006c44b911ee0ed915350db30325d01808525ae7cc8d57643a2ce/coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2", size = 201022 }, - { url = 
"https://files.pythonhosted.org/packages/61/af/5964b8d7d9a5c767785644d9a5a63cacba9a9c45cc42ba06d25895ec87be/coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7", size = 229102 }, - { url = "https://files.pythonhosted.org/packages/d9/1d/cd467fceb62c371f9adb1d739c92a05d4e550246daa90412e711226bd320/coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e", size = 227441 }, - { url = "https://files.pythonhosted.org/packages/fe/57/e4f8ad64d84ca9e759d783a052795f62a9f9111585e46068845b1cb52c2b/coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1", size = 228265 }, - { url = "https://files.pythonhosted.org/packages/88/8b/b0d9fe727acae907fa7f1c8194ccb6fe9d02e1c3e9001ecf74c741f86110/coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9", size = 234217 }, - { url = "https://files.pythonhosted.org/packages/66/2e/c99fe1f6396d93551aa352c75410686e726cd4ea104479b9af1af22367ce/coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250", size = 232466 }, - { url = "https://files.pythonhosted.org/packages/bb/e9/88747b40c8fb4a783b40222510ce6d66170217eb05d7f46462c36b4fa8cc/coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2", size = 233669 }, - { url = "https://files.pythonhosted.org/packages/b1/d5/a8e276bc005e42114468d4fe03e0a9555786bc51cbfe0d20827a46c1565a/coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb", size = 203199 }, - { url = "https://files.pythonhosted.org/packages/a9/0c/4a848ae663b47f1195abcb09a951751dd61f80b503303b9b9d768e0fd321/coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27", size = 204109 }, - { url = "https://files.pythonhosted.org/packages/67/fb/b3b1d7887e1ea25a9608b0776e480e4bbc303ca95a31fd585555ec4fff5a/coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d", size = 193207 }, -] - -[package.optional-dependencies] -toml = [ - { name = "tomli", marker = "python_full_version <= '3.11'" }, -] - -[[package]] -name = "cssmin" -version = "0.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/d8/dc9da69bb186303f7ab41adef0a5b6d34da2fdba006827620877760241c3/cssmin-0.2.0.tar.gz", hash = "sha256:e012f0cc8401efcf2620332339011564738ae32be8c84b2e43ce8beaec1067b6", size = 3228 } - -[[package]] -name = "docutils" -version = "0.20.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1f/53/a5da4f2c5739cf66290fac1431ee52aff6851c7c8ffd8264f13affd7bcdd/docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b", size = 2058365 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/87/f238c0670b94533ac0353a4e2a1a771a0cc73277b88bff23d3ae35a256c1/docutils-0.20.1-py3-none-any.whl", hash = 
"sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6", size = 572666 }, -] - -[[package]] -name = "feedgenerator" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytz" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5e/4e/0efde53652edbae3f86c0ec67260bb53287edc67033ac8d00fe08cd02557/feedgenerator-2.1.0.tar.gz", hash = "sha256:f075f23f28fd227f097c36b212161c6cf012e1c6caaf7ff53d5d6bb02cd42b9d", size = 20682 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/a1/b7b1711d9bf43c3795366431633ab6ba6942744243aad809272ebfa59b39/feedgenerator-2.1.0-py3-none-any.whl", hash = "sha256:93b7ce1c5a86195cafd6a8e9baf6a2a863ebd6d9905e840ce5778f73efd9a8d5", size = 21796 }, -] - -[[package]] -name = "importlib-metadata" -version = "6.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.8'" }, - { name = "zipp" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a3/82/f6e29c8d5c098b6be61460371c2c5591f4a335923639edec43b3830650a4/importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4", size = 53569 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/94/64287b38c7de4c90683630338cf28f129decbba0a44f0c6db35a873c73c4/importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5", size = 22934 }, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, -] - -[[package]] -name = "invoke" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/42/127e6d792884ab860defc3f4d80a8f9812e48ace584ffc5a346de58cdc6c/invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5", size = 299835 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820", size = 160274 }, -] - -[[package]] -name = "jinja2" -version = "3.1.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, -] - -[[package]] -name = "libsass" -version = "0.22.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/55/14/f1d9578dce39f890ae3c0f93db8a23e89d2a1403da81d307ffb429df7c3b/libsass-0.22.0.tar.gz", hash = "sha256:3ab5ad18e47db560f4f0c09e3d28cf3bb1a44711257488ac2adad69f4f7f8425", size = 316258 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/be/178b03e598e86bf1faa70f9ebe4cfad236dcf159af11a39f30ac8d7ce693/libsass-0.22.0-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f1efc1b612299c88aec9e39d6ca0c266d360daa5b19d9430bdeaffffa86993f9", size = 9443966 }, - { url = "https://files.pythonhosted.org/packages/92/fd/73b8081c5bc2b11b61596f74b54d45226633313c2a4de53205da948fc01c/libsass-0.22.0-cp37-abi3-macosx_10_15_x86_64.whl", hash = "sha256:081e256ab3c5f3f09c7b8dea3bf3bf5e64a97c6995fd9eea880639b3f93a9f9a", size = 1064075 }, - { url = "https://files.pythonhosted.org/packages/f5/ee/844666d66a4dbb7b81e3c0257253963091f2abec8454ce7abf6f89b409f9/libsass-0.22.0-cp37-abi3-win32.whl", hash = "sha256:89c5ce497fcf3aba1dd1b19aae93b99f68257e5f2026b731b00a872f13324c7f", size = 775665 }, - { url = "https://files.pythonhosted.org/packages/43/33/ccd65ef94bf37ad01ad4f82c3426d9274eee7d448ca9c23e516d03322520/libsass-0.22.0-cp37-abi3-win_amd64.whl", hash = "sha256:65455a2728b696b62100eb5932604aa13a29f4ac9a305d95773c14aaa7200aaf", size = 880885 }, - { url = "https://files.pythonhosted.org/packages/15/99/f0bbc4ccd254ad7b8e76fe878f11eb98a1931fa9bc46d599a52443641bff/libsass-0.22.0-cp38-abi3-macosx_14_0_arm64.whl", hash = "sha256:5fb2297a4754a6c8e25cfe5c015a3b51a2b6b9021b333f989bb8ce9d60eb5828", size = 982832 }, -] - -[[package]] -name = "markdown" -version = "3.4.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/87/2a/62841f4fb1fef5fa015ded48d02401cd95643ca03b6760b29437b62a04a4/Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6", size = 324459 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/b5/228c1cdcfe138f1a8e01ab1b54284c8b83735476cb22b6ba251656ed13ad/Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941", size = 94174 }, -] - -[[package]] -name = "markdown-it-py" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mdurl" }, - { name = "typing-extensions", marker = "python_full_version < '3.8'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e4/c0/59bd6d0571986f72899288a95d9d6178d0eebd70b6650f1bb3f0da90f8f7/markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1", size = 67120 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/25/2d88e8feee8e055d015343f9b86e370a1ccbec546f2865c98397aaef24af/markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30", size = 84466 }, -] - -[[package]] -name = "markupsafe" -version = "2.1.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/87/5b/aae44c6655f3801e81aa3eef09dbbf012431987ba564d7231722f68df02d/MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", size = 19384 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e4/54/ad5eb37bf9d51800010a74e4665425831a9db4e7c4e0fde4352e391e808e/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", size = 18206 }, - { url = "https://files.pythonhosted.org/packages/6a/4a/a4d49415e600bacae038c67f9fecc1d5433b9d3c71a4de6f33537b89654c/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", size = 14079 }, - { url = "https://files.pythonhosted.org/packages/0a/7b/85681ae3c33c385b10ac0f8dd025c30af83c78cec1c37a6aa3b55e67f5ec/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", size = 26620 }, - { url = "https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", size = 25818 }, - { url = "https://files.pythonhosted.org/packages/29/fe/a36ba8c7ca55621620b2d7c585313efd10729e63ef81e4e61f52330da781/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", size = 25493 }, - { url = "https://files.pythonhosted.org/packages/60/ae/9c60231cdfda003434e8bd27282b1f4e197ad5a710c14bee8bea8a9ca4f0/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", size = 30630 }, - { url = "https://files.pythonhosted.org/packages/65/dc/1510be4d179869f5dafe071aecb3f1f41b45d37c02329dfba01ff59e5ac5/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", size = 29745 }, - { url = "https://files.pythonhosted.org/packages/30/39/8d845dd7d0b0613d86e0ef89549bfb5f61ed781f59af45fc96496e897f3a/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", size = 30021 }, - { url = "https://files.pythonhosted.org/packages/c7/5c/356a6f62e4f3c5fbf2602b4771376af22a3b16efa74eb8716fb4e328e01e/MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", size = 16659 }, - { url = "https://files.pythonhosted.org/packages/69/48/acbf292615c65f0604a0c6fc402ce6d8c991276e16c80c46a8f758fbd30c/MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", size = 17213 }, - { url = "https://files.pythonhosted.org/packages/11/e7/291e55127bb2ae67c64d66cef01432b5933859dfb7d6949daa721b89d0b3/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", size = 18219 }, - { url = "https://files.pythonhosted.org/packages/6b/cb/aed7a284c00dfa7c0682d14df85ad4955a350a21d2e3b06d8240497359bf/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", size = 14098 }, - { url = "https://files.pythonhosted.org/packages/1c/cf/35fe557e53709e93feb65575c93927942087e9b97213eabc3fe9d5b25a55/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", size = 29014 }, - { url = "https://files.pythonhosted.org/packages/97/18/c30da5e7a0e7f4603abfc6780574131221d9148f323752c2755d48abad30/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", size = 28220 }, - { url = "https://files.pythonhosted.org/packages/0c/40/2e73e7d532d030b1e41180807a80d564eda53babaf04d65e15c1cf897e40/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", size = 27756 }, - { url = "https://files.pythonhosted.org/packages/18/46/5dca760547e8c59c5311b332f70605d24c99d1303dd9a6e1fc3ed0d73561/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", size = 33988 }, - { url = "https://files.pythonhosted.org/packages/6d/c5/27febe918ac36397919cd4a67d5579cbbfa8da027fa1238af6285bb368ea/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", size = 32718 }, - { url = "https://files.pythonhosted.org/packages/f8/81/56e567126a2c2bc2684d6391332e357589a96a76cb9f8e5052d85cb0ead8/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", size = 33317 }, - { url = "https://files.pythonhosted.org/packages/00/0b/23f4b2470accb53285c613a3ab9ec19dc944eaf53592cb6d9e2af8aa24cc/MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", size = 16670 }, - { url = "https://files.pythonhosted.org/packages/b7/a2/c78a06a9ec6d04b3445a949615c4c7ed86a0b2eb68e44e7541b9d57067cc/MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", size = 17224 }, - { url = "https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", size = 18215 }, - { url = "https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", size = 14069 }, - { url = "https://files.pythonhosted.org/packages/51/b5/5d8ec796e2a08fc814a2c7d2584b55f889a55cf17dd1a90f2beb70744e5c/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", size = 29452 }, - { url = "https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", size = 28462 }, - { url = "https://files.pythonhosted.org/packages/2d/75/fd6cb2e68780f72d47e6671840ca517bda5ef663d30ada7616b0462ad1e3/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", size = 27869 }, - { url = 
"https://files.pythonhosted.org/packages/b0/81/147c477391c2750e8fc7705829f7351cf1cd3be64406edcf900dc633feb2/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", size = 33906 }, - { url = "https://files.pythonhosted.org/packages/8b/ff/9a52b71839d7a256b563e85d11050e307121000dcebc97df120176b3ad93/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", size = 32296 }, - { url = "https://files.pythonhosted.org/packages/88/07/2dc76aa51b481eb96a4c3198894f38b480490e834479611a4053fbf08623/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", size = 33038 }, - { url = "https://files.pythonhosted.org/packages/96/0c/620c1fb3661858c0e37eb3cbffd8c6f732a67cd97296f725789679801b31/MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", size = 16572 }, - { url = "https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", size = 17127 }, - { url = "https://files.pythonhosted.org/packages/a7/88/a940e11827ea1c136a34eca862486178294ae841164475b9ab216b80eb8e/MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f", size = 13982 }, - { url = "https://files.pythonhosted.org/packages/cb/06/0d28bd178db529c5ac762a625c335a9168a7a23f280b4db9c95e97046145/MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf", size = 26335 }, - { url = "https://files.pythonhosted.org/packages/4a/1d/c4f5016f87ced614eacc7d5fb85b25bcc0ff53e8f058d069fc8cbfdc3c7a/MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a", size = 25557 }, - { url = "https://files.pythonhosted.org/packages/b3/fb/c18b8c9fbe69e347fdbf782c6478f1bc77f19a830588daa224236678339b/MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52", size = 25245 }, - { url = "https://files.pythonhosted.org/packages/2f/69/30d29adcf9d1d931c75001dd85001adad7374381c9c2086154d9f6445be6/MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9", size = 31013 }, - { url = "https://files.pythonhosted.org/packages/3a/03/63498d05bd54278b6ca340099e5b52ffb9cdf2ee4f2d9b98246337e21689/MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df", size = 30178 }, - { url = "https://files.pythonhosted.org/packages/68/79/11b4fe15124692f8673b603433e47abca199a08ecd2a4851bfbdc97dc62d/MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50", size = 30429 }, - { url = "https://files.pythonhosted.org/packages/ed/88/408bdbf292eb86f03201c17489acafae8358ba4e120d92358308c15cea7c/MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = 
"sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371", size = 16633 }, - { url = "https://files.pythonhosted.org/packages/6c/4c/3577a52eea1880538c435176bc85e5b3379b7ab442327ccd82118550758f/MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2", size = 17215 }, - { url = "https://files.pythonhosted.org/packages/f8/ff/2c942a82c35a49df5de3a630ce0a8456ac2969691b230e530ac12314364c/MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a", size = 18192 }, - { url = "https://files.pythonhosted.org/packages/4f/14/6f294b9c4f969d0c801a4615e221c1e084722ea6114ab2114189c5b8cbe0/MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46", size = 14072 }, - { url = "https://files.pythonhosted.org/packages/81/d4/fd74714ed30a1dedd0b82427c02fa4deec64f173831ec716da11c51a50aa/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532", size = 26928 }, - { url = "https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab", size = 26106 }, - { url = "https://files.pythonhosted.org/packages/4c/6f/f2b0f675635b05f6afd5ea03c094557bdb8622fa8e673387444fe8d8e787/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68", size = 25781 }, - { url = "https://files.pythonhosted.org/packages/51/e0/393467cf899b34a9d3678e78961c2c8cdf49fb902a959ba54ece01273fb1/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0", size = 30518 }, - { url = "https://files.pythonhosted.org/packages/f6/02/5437e2ad33047290dafced9df741d9efc3e716b75583bbd73a9984f1b6f7/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4", size = 29669 }, - { url = "https://files.pythonhosted.org/packages/0e/7d/968284145ffd9d726183ed6237c77938c021abacde4e073020f920e060b2/MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3", size = 29933 }, - { url = "https://files.pythonhosted.org/packages/bf/f3/ecb00fc8ab02b7beae8699f34db9357ae49d9f21d4d3de6f305f34fa949e/MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff", size = 16656 }, - { url = "https://files.pythonhosted.org/packages/92/21/357205f03514a49b293e214ac39de01fadd0970a6e05e4bf1ddd0ffd0881/MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029", size = 17206 }, - { url = "https://files.pythonhosted.org/packages/0f/31/780bb297db036ba7b7bbede5e1d7f1e14d704ad4beb3ce53fb495d22bc62/MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf", size = 18193 }, - { url = 
"https://files.pythonhosted.org/packages/6c/77/d77701bbef72892affe060cdacb7a2ed7fd68dae3b477a8642f15ad3b132/MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2", size = 14073 }, - { url = "https://files.pythonhosted.org/packages/d9/a7/1e558b4f78454c8a3a0199292d96159eb4d091f983bc35ef258314fe7269/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8", size = 26486 }, - { url = "https://files.pythonhosted.org/packages/5f/5a/360da85076688755ea0cceb92472923086993e86b5613bbae9fbc14136b0/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3", size = 25685 }, - { url = "https://files.pythonhosted.org/packages/6a/18/ae5a258e3401f9b8312f92b028c54d7026a97ec3ab20bfaddbdfa7d8cce8/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465", size = 25338 }, - { url = "https://files.pythonhosted.org/packages/0b/cc/48206bd61c5b9d0129f4d75243b156929b04c94c09041321456fd06a876d/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e", size = 30439 }, - { url = "https://files.pythonhosted.org/packages/d1/06/a41c112ab9ffdeeb5f77bc3e331fdadf97fa65e52e44ba31880f4e7f983c/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea", size = 29531 }, - { url = "https://files.pythonhosted.org/packages/02/8c/ab9a463301a50dab04d5472e998acbd4080597abc048166ded5c7aa768c8/MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6", size = 29823 }, - { url = "https://files.pythonhosted.org/packages/bc/29/9bc18da763496b055d8e98ce476c8e718dcfd78157e17f555ce6dd7d0895/MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf", size = 16658 }, - { url = "https://files.pythonhosted.org/packages/f6/f8/4da07de16f10551ca1f640c92b5f316f9394088b183c6a57183df6de5ae4/MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5", size = 17211 }, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, -] - -[[package]] -name = "packaging" -version = "24.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/b5/b43a27ac7472e1818c4bafd44430e69605baefe1f34440593e0332ec8b4d/packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9", size = 147882 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", size = 53488 }, -] - -[[package]] -name = "pelican" -version = "4.8.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "blinker" }, - { name = "docutils" }, - { name = "feedgenerator" }, - { name = "jinja2" }, - { name = "pygments" }, - { name = "python-dateutil" }, - { name = "pytz" }, - { name = "rich" }, - { name = "unidecode" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/37837339c0cec3b4490ab3edd98b8510af8cb46469a75055d4458a3be22c/pelican-4.8.0.tar.gz", hash = "sha256:6445c00cff2142a30592a2de046e5647b84a36c5a0cfafc0eba75abbabb2b4b1", size = 1137954 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/51/b9a57e22a033a9cef7f6aae8b90adb2957d45c7fa1121d9962b42320c048/pelican-4.8.0-py3-none-any.whl", hash = "sha256:c80a81930f57f9b1a11c9ab5894ce1465dcda2028c9e4e3993cf9cbf2061a57d", size = 1396992 }, -] - -[[package]] -name = "pelican-webassets" -version = "2.0.0" -source = { editable = "." } -dependencies = [ - { name = "pelican" }, -] - -[package.dependency-groups] -dev = [ - { name = "cssmin" }, - { name = "invoke" }, - { name = "libsass" }, - { name = "markdown" }, - { name = "pytest" }, - { name = "pytest-cov" }, - { name = "pytest-sugar" }, - { name = "ruff" }, -] -markdown = [ - { name = "markdown" }, -] - -[package.metadata] -requires-dist = [{ name = "pelican", specifier = ">=4.5" }] - -[package.metadata.dependency-groups] -dev = [ - { name = "cssmin", specifier = "==0.2.0" }, - { name = "invoke", specifier = "==2.2.0" }, - { name = "libsass", specifier = ">=0.22.0" }, - { name = "markdown", specifier = "==3.4.4" }, - { name = "pytest", specifier = "==6.2.5" }, - { name = "pytest-cov", specifier = "==3.0.0" }, - { name = "pytest-sugar", specifier = "==1.0.0" }, - { name = "ruff", specifier = ">=0.7.1" }, -] -markdown = [{ name = "markdown", specifier = "==3.4.4" }] - -[[package]] -name = "pluggy" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8a/42/8f2833655a29c4e9cb52ee8a2be04ceac61bcff4a680fb338cbd3d1e322d/pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3", size = 61613 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/32/4a79112b8b87b21450b066e102d6608907f4c885ed7b04c3fdb085d4d6ae/pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849", size = 17695 }, -] - -[[package]] -name = "py" -version = "1.11.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708 }, -] - -[[package]] -name = "pygments" -version = "2.17.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/55/59/8bccf4157baf25e4aa5a0bb7fa3ba8600907de105ebc22b0c78cfbf6f565/pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367", size = 4827772 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/97/9c/372fef8377a6e340b1704768d20daaded98bf13282b5327beb2e2fe2c7ef/pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c", size = 1179756 }, -] - -[[package]] -name = "pytest" -version = "6.2.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "atomicwrites", marker = "sys_platform == 'win32'" }, - { name = "attrs" }, - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "importlib-metadata", marker = "python_full_version < '3.8'" }, - { name = "iniconfig" }, - { name = "packaging" }, - { name = "pluggy" }, - { name = "py" }, - { name = "toml" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4b/24/7d1f2d2537de114bdf1e6875115113ca80091520948d370c964b88070af2/pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89", size = 1118720 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/76/86f886e750b81a4357b6ed606b2bcf0ce6d6c27ad3c09ebf63ed674fc86e/pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134", size = 280654 }, -] - -[[package]] -name = "pytest-cov" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "coverage", extra = ["toml"] }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/61/41/e046526849972555928a6d31c2068410e47a31fb5ab0a77f868596811329/pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470", size = 61440 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/49/b3e0edec68d81846f519c602ac38af9db86e1e71275528b3e814ae236063/pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6", size = 20981 }, -] - -[[package]] -name = "pytest-sugar" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging" }, - { name = "pytest" }, - { name = "termcolor" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f5/ac/5754f5edd6d508bc6493bc37d74b928f102a5fff82d9a80347e180998f08/pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a", size = 14992 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/fb/889f1b69da2f13691de09a111c16c4766a433382d44aa0ecf221deded44a/pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd", size = 10171 }, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, -] - -[[package]] -name = "pytz" -version = "2024.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, -] - -[[package]] -name = "rich" -version = "13.8.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markdown-it-py" }, - { name = "pygments" }, - { name = "typing-extensions", marker = "python_full_version < '3.9'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/92/76/40f084cb7db51c9d1fa29a7120717892aeda9a7711f6225692c957a93535/rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a", size = 222080 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/11/dadb85e2bd6b1f1ae56669c3e1f0410797f9605d752d68fb47b77f525b31/rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06", size = 241608 }, -] - -[[package]] -name = "ruff" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/21/5c6e05e0fd3fbb41be4fb92edbc9a04de70baf60adb61435ce0c6b8c3d55/ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4", size = 3181670 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/65/45/8a20a9920175c9c4892b2420f80ff3cf14949cf3067118e212f9acd9c908/ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89", size = 10389268 }, - { url = "https://files.pythonhosted.org/packages/1b/d3/2f8382db2cf4f9488e938602e33e36287f9d26cb283aa31f11c31297ce79/ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35", size = 10188348 }, - { url = "https://files.pythonhosted.org/packages/a2/31/7d14e2a88da351200f844b7be889a0845d9e797162cf76b136d21b832a23/ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99", size = 9841448 }, - { url = "https://files.pythonhosted.org/packages/db/99/738cafdc768eceeca0bd26c6f03e213aa91203d2278e1d95b1c31c4ece41/ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca", size = 10674864 }, - { url = "https://files.pythonhosted.org/packages/fe/12/bcf2836b50eab53c65008383e7d55201e490d75167c474f14a16e1af47d2/ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250", size = 10192105 }, - { url = "https://files.pythonhosted.org/packages/2b/71/261d5d668bf98b6c44e89bfb5dfa4cb8cb6c8b490a201a3d8030e136ea4f/ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c", size = 11194144 }, - { url = 
"https://files.pythonhosted.org/packages/90/1f/0926d18a3b566fa6e7b3b36093088e4ffef6b6ba4ea85a462d9a93f7e35c/ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565", size = 11917066 }, - { url = "https://files.pythonhosted.org/packages/cd/a8/9fac41f128b6a44ab4409c1493430b4ee4b11521e8aeeca19bfe1ce851f9/ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7", size = 11458821 }, - { url = "https://files.pythonhosted.org/packages/25/cd/59644168f086ab13fe4e02943b9489a0aa710171f66b178e179df5383554/ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a", size = 12700379 }, - { url = "https://files.pythonhosted.org/packages/fb/30/3bac63619eb97174661829c07fc46b2055a053dee72da29d7c304c1cd2c0/ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad", size = 11019813 }, - { url = "https://files.pythonhosted.org/packages/4b/af/f567b885b5cb3bcdbcca3458ebf210cc8c9c7a9f61c332d3c2a050c3b21e/ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112", size = 10662146 }, - { url = "https://files.pythonhosted.org/packages/bc/ad/eb930d3ad117a9f2f7261969c21559ebd82bb13b6e8001c7caed0d44be5f/ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378", size = 10256911 }, - { url = "https://files.pythonhosted.org/packages/20/d5/af292ce70a016fcec792105ca67f768b403dd480a11888bc1f418fed0dd5/ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8", size = 10767488 }, - { url = "https://files.pythonhosted.org/packages/24/85/cc04a3bd027f433bebd2a097e63b3167653c079f7f13d8f9a1178e693412/ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd", size = 11093368 }, - { url = "https://files.pythonhosted.org/packages/0b/fb/c39cbf32d1f3e318674b8622f989417231794926b573f76dd4d0ca49f0f1/ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9", size = 8594180 }, - { url = "https://files.pythonhosted.org/packages/5a/71/ec8cdea34ecb90c830ca60d54ac7b509a7b5eab50fae27e001d4470fe813/ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307", size = 9419751 }, - { url = "https://files.pythonhosted.org/packages/79/7b/884553415e9f0a9bf358ed52fb68b934e67ef6c5a62397ace924a1afdf9a/ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37", size = 8717402 }, -] - -[[package]] -name = "six" -version = "1.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", 
size = 11053 }, -] - -[[package]] -name = "termcolor" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b8/85/147a0529b4e80b6b9d021ca8db3a820fcac53ec7374b87073d004aaf444c/termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a", size = 12163 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/67/e1/434566ffce04448192369c1a282931cf4ae593e91907558eaecd2e9f2801/termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475", size = 6872 }, -] - -[[package]] -name = "toml" -version = "0.10.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, -] - -[[package]] -name = "tomli" -version = "2.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f", size = 15164 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757 }, -] - -[[package]] -name = "typing-extensions" -version = "4.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3c/8b/0111dd7d6c1478bf83baa1cab85c686426c7a6274119aceb2bd9d35395ad/typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2", size = 72876 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/6b/63cc3df74987c36fe26157ee12e09e8f9db4de771e0f3404263117e75b95/typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36", size = 33232 }, -] - -[[package]] -name = "unidecode" -version = "1.3.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/89/19151076a006b9ac0dd37b1354e031f5297891ee507eb624755e58e10d3e/Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4", size = 192701 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/b7/6ec57841fb67c98f52fc8e4a2d96df60059637cba077edc569a302a8ffc7/Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39", size = 235494 }, -] - -[[package]] -name = "zipp" -version = "3.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/27/f0ac6b846684cecce1ee93d32450c45ab607f65c2e0255f0092032d91f07/zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", size = 18454 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/5b/fa/c9e82bbe1af6266adf08afb563905eb87cab83fde00a0a08963510621047/zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556", size = 6758 }, -] From 51f824625ff176f7b78591a6489512a1865173b8 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Sat, 2 Nov 2024 17:22:51 -0700 Subject: [PATCH 20/23] Drop python 3.8 dependency --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f98fd55..88d1302 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 From 2fcc2af2d9e318c933614d607fe4e7a02ac902c2 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Sat, 2 Nov 2024 17:23:43 -0700 Subject: [PATCH 21/23] Don't ignore the lock files --- .gitignore | 1 - 1 file changed, 1 deletion(-) delete mode 100644 .gitignore diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 07df930..0000000 --- a/.gitignore +++ /dev/null @@ -1 +0,0 @@ -uv.lock From 248c08097fad0283afdff09deb008c347e2959d9 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Sat, 2 Nov 2024 17:25:38 -0700 Subject: [PATCH 22/23] Update the actions to use pdm --- .github/workflows/main.yml | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 88d1302..67c64bd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -20,17 +20,16 @@ jobs: - uses: actions/checkout@v4 with: persist-credentials: false - - name: Install uv - uses: astral-sh/setup-uv@v3 + - name: Install pdm + uses: pdm-project/setup-pdm@v4 - name: Install dependencies - env: - UV_PYTHON: "python${{ matrix.python-version }}" run: | - uv sync --frozen --no-sources + pdm venv create + pdm sync --only-keep --dev - name: Run tests - run: uv run invoke tests + run: pdm run invoke tests lint: name: Lint @@ -46,17 +45,16 @@ jobs: with: retry: true - - name: Install uv - uses: astral-sh/setup-uv@v3 + - name: Install pdm + uses: pdm-project/setup-pdm@v4 - name: Install dependencies - env: - UV_PYTHON: "python3.10" run: | - uv sync --frozen --no-sources + pdm venv create + pdm sync --only-keep --dev - name: Run linters - run: uv run invoke lint --diff + run: pdm run invoke lint --diff deploy: name: Deploy From 7e32761f4c9887ee13fad34da24842a77dea6e40 Mon Sep 17 00:00:00 2001 From: Chris Rose Date: Sat, 2 Nov 2024 17:35:14 -0700 Subject: [PATCH 23/23] Iterating on pdm venv setup --- .github/workflows/main.yml | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 67c64bd..61ddedd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -22,11 +22,15 @@ jobs: persist-credentials: false - name: Install pdm uses: pdm-project/setup-pdm@v4 + with: + python-version: ${{ matrix.python-version }} + cache: true + cache-dependency-path: ./pyproject.toml + version: "2.20.0" - name: Install dependencies run: | - pdm venv create - pdm sync --only-keep --dev + pdm install - name: Run tests run: pdm run invoke tests @@ -47,11 +51,13 @@ jobs: - name: Install pdm uses: pdm-project/setup-pdm@v4 + with: + python-version: "3.10" + version: "2.20.0" - name: Install dependencies 
run: | - pdm venv create - pdm sync --only-keep --dev + pdm install - name: Run linters run: pdm run invoke lint --diff
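
The updated jobs reduce to a plain pdm workflow, so the same steps can be reproduced locally. A minimal sketch, assuming pdm is available on the machine (the workflow pins 2.20.0) and a supported Python (3.9-3.12) is active; the invoke task names are the ones the workflow already calls:

    pdm install                 # create the pdm-managed venv and install the project with its dev dependency group
    pdm run invoke tests        # same entry point as the "Run tests" step
    pdm run invoke lint --diff  # same entry point as the "Run linters" step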