From ce44f98c9a68efc958de7ac4d4d966dd8ec79b3b Mon Sep 17 00:00:00 2001
From: Erik Sundell
Date: Sun, 23 Oct 2022 17:57:07 +0200
Subject: [PATCH 1/7] pre-commit: enable pyupgrade

---
 .pre-commit-config.yaml | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index aaf7a3305..42d13f19e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -9,13 +9,17 @@
 # - Register git hooks: pre-commit install --install-hooks
 #
 repos:
-  # # Autoformat: Python code, syntax patterns are modernized
-  # - repo: https://github.com/asottile/pyupgrade
-  #   rev: v3.0.0
-  #   hooks:
-  #     - id: pyupgrade
-  #       args:
-  #         - --py38-plus
+  # Autoformat: Python code, syntax patterns are modernized
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.2.0
+    hooks:
+      - id: pyupgrade
+        args:
+          - --py36-plus
+        # check-tmp is a Python based test script run in created environments
+        # that can be at least Python 3.5 even though we require Python 3.6 for
+        # repo2docker itself.
+        exclude: check-tmp

   # Autoformat: Python code
   - repo: https://github.com/psf/black

From 5a93542321d755330fe70bc2d5393928dbce372a Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Sun, 23 Oct 2022 16:09:11 +0000
Subject: [PATCH 2/7] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 repo2docker/__main__.py | 2 +-
 repo2docker/_version.py | 16 +++---
 repo2docker/app.py | 4 +-
 repo2docker/buildpacks/base.py | 9 ++--
 repo2docker/buildpacks/conda/__init__.py | 4 +-
 repo2docker/buildpacks/docker.py | 3 +-
 repo2docker/buildpacks/legacy/__init__.py | 2 +-
 repo2docker/buildpacks/python/__init__.py | 6 +--
 repo2docker/buildpacks/r.py | 4 +-
 repo2docker/contentproviders/dataverse.py | 7 ++--
 repo2docker/contentproviders/doi.py | 18 ++++----
 repo2docker/contentproviders/figshare.py | 5 +--
 repo2docker/contentproviders/git.py | 19 +++----
 repo2docker/contentproviders/hydroshare.py | 6 ++--
 repo2docker/contentproviders/mercurial.py | 10 ++---
 repo2docker/contentproviders/swhid.py | 16 +++---
 repo2docker/contentproviders/zenodo.py | 7 +--
 repo2docker/engine.py | 2 +-
 tests/conftest.py | 2 +-
 tests/memlimit/dockerfile/postBuild | 2 +-
 tests/memlimit/non-dockerfile/postBuild | 2 +-
 tests/unit/contentproviders/test_dataverse.py | 6 ++--
 tests/unit/contentproviders/test_doi.py | 2 +-
 tests/unit/contentproviders/test_figshare.py | 24 ++++------
 .../unit/contentproviders/test_hydroshare.py | 6 ++--
 tests/unit/contentproviders/test_swhid.py | 14 +++---
 tests/unit/contentproviders/test_zenodo.py | 28 +++++------
 tests/unit/test_args.py | 2 +-
 tests/unit/test_connect_url.py | 4 +-
 tests/unit/test_env.py | 4 +-
 tests/unit/test_ports.py | 4 +-
 tests/unit/test_r.py | 7 ++--
 tests/unit/test_users.py | 4 +-
 tests/unit/test_utils.py | 2 +-
 tests/unit/test_volumes.py | 6 ++--
 versioneer.py | 47 ++++++++----------
 36 files changed, 139 insertions(+), 167 deletions(-)

diff --git a/repo2docker/__main__.py b/repo2docker/__main__.py
index 1b52b1aee..c3e7ef7e5 100644
--- a/repo2docker/__main__.py
+++ b/repo2docker/__main__.py
@@ -52,7 +52,7 @@ def __call__(self, parser, namespace, values, option_string=None):
         # key pass using current value, or don't pass
         if "=" not in values:
             try:
-                value_to_append = "{}={}".format(values, os.environ[values])
+                value_to_append = f"{values}={os.environ[values]}"
             except KeyError:
                 # no local def, so don't pass
                 return
diff --git a/repo2docker/_version.py
b/repo2docker/_version.py index 442840369..aca7b127e 100644 --- a/repo2docker/_version.py +++ b/repo2docker/_version.py @@ -84,7 +84,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= stderr=(subprocess.PIPE if hide_stderr else None), ) break - except EnvironmentError: + except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue @@ -94,7 +94,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= return None, None else: if verbose: - print("unable to find command, tried %s" % (commands,)) + print(f"unable to find command, tried {commands}") return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: @@ -147,7 +147,7 @@ def git_get_keywords(versionfile_abs): # _version.py. keywords = {} try: - f = open(versionfile_abs, "r") + f = open(versionfile_abs) for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) @@ -162,7 +162,7 @@ def git_get_keywords(versionfile_abs): if mo: keywords["date"] = mo.group(1) f.close() - except EnvironmentError: + except OSError: pass return keywords @@ -186,11 +186,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -199,7 +199,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". 
- tags = set([r for r in refs if re.search(r"\d", r)]) + tags = {r for r in refs if re.search(r"\d", r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -302,7 +302,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format( full_tag, tag_prefix, ) diff --git a/repo2docker/app.py b/repo2docker/app.py index e11f36b6c..718b12b29 100755 --- a/repo2docker/app.py +++ b/repo2docker/app.py @@ -568,7 +568,7 @@ def push_image(self): ) last_emit_time = time.time() self.log.info( - "Successfully pushed {}".format(self.output_image_spec), + f"Successfully pushed {self.output_image_spec}", extra=dict(phase=R2dState.PUSHING), ) @@ -767,7 +767,7 @@ def build(self): self.subdir, extra=dict(phase=R2dState.FAILED), ) - raise FileNotFoundError("Could not find {}".format(checkout_path)) + raise FileNotFoundError(f"Could not find {checkout_path}") with chdir(checkout_path): for BP in self.buildpacks: diff --git a/repo2docker/buildpacks/base.py b/repo2docker/buildpacks/base.py index 57ed2f353..16476e9d8 100644 --- a/repo2docker/buildpacks/base.py +++ b/repo2docker/buildpacks/base.py @@ -462,7 +462,7 @@ def render(self, build_args=None): last_user = "root" for user, script in self.get_build_scripts(): if last_user != user: - build_script_directives.append("USER {}".format(user)) + build_script_directives.append(f"USER {user}") last_user = user build_script_directives.append( "RUN {}".format(textwrap.dedent(script.strip("\n"))) @@ -472,7 +472,7 @@ def render(self, build_args=None): last_user = "root" for user, script in self.get_assemble_scripts(): if last_user != user: - assemble_script_directives.append("USER {}".format(user)) + assemble_script_directives.append(f"USER {user}") last_user = user assemble_script_directives.append( "RUN {}".format(textwrap.dedent(script.strip("\n"))) @@ -482,7 +482,7 @@ def render(self, build_args=None): last_user = "root" for user, script in self.get_preassemble_scripts(): if last_user != user: - preassemble_script_directives.append("USER {}".format(user)) + preassemble_script_directives.append(f"USER {user}") last_user = user preassemble_script_directives.append( "RUN {}".format(textwrap.dedent(script.strip("\n"))) @@ -616,8 +616,7 @@ def _filter_tar(tar): build_kwargs.update(extra_build_kwargs) - for line in client.build(**build_kwargs): - yield line + yield from client.build(**build_kwargs) class BaseImage(BuildPack): diff --git a/repo2docker/buildpacks/conda/__init__.py b/repo2docker/buildpacks/conda/__init__.py index 4408ad8ed..662e70bfe 100644 --- a/repo2docker/buildpacks/conda/__init__.py +++ b/repo2docker/buildpacks/conda/__init__.py @@ -377,7 +377,7 @@ def get_env_scripts(self): r""" echo auth-none=1 >> /etc/rstudio/rserver.conf && \ echo auth-minimum-user-id=0 >> /etc/rstudio/rserver.conf && \ - echo "rsession-which-r={0}/bin/R" >> /etc/rstudio/rserver.conf && \ + echo "rsession-which-r={}/bin/R" >> /etc/rstudio/rserver.conf && \ echo www-frame-origin=same >> /etc/rstudio/rserver.conf """.format( env_prefix @@ -387,7 +387,7 @@ def get_env_scripts(self): "${NB_USER}", # Register the jupyter kernel r""" - R --quiet -e "IRkernel::installspec(prefix='{0}')" + R --quiet -e "IRkernel::installspec(prefix='{}')" """.format( env_prefix ), diff --git a/repo2docker/buildpacks/docker.py 
b/repo2docker/buildpacks/docker.py index 5f0c2fb33..e0ccfc95c 100644 --- a/repo2docker/buildpacks/docker.py +++ b/repo2docker/buildpacks/docker.py @@ -57,5 +57,4 @@ def build( build_kwargs.update(extra_build_kwargs) - for line in client.build(**build_kwargs): - yield line + yield from client.build(**build_kwargs) diff --git a/repo2docker/buildpacks/legacy/__init__.py b/repo2docker/buildpacks/legacy/__init__.py index 7f5d004f6..a1b91e026 100644 --- a/repo2docker/buildpacks/legacy/__init__.py +++ b/repo2docker/buildpacks/legacy/__init__.py @@ -20,7 +20,7 @@ def detect(self): """Check if current repo should be built with the Legacy BuildPack.""" log = logging.getLogger("repo2docker") try: - with open("Dockerfile", "r") as f: + with open("Dockerfile") as f: for line in f: if line.startswith("FROM"): if "andrewosh/binder-base" in line.split("#")[0].lower(): diff --git a/repo2docker/buildpacks/python/__init__.py b/repo2docker/buildpacks/python/__init__.py index 7ea2a8264..0c12f91b8 100644 --- a/repo2docker/buildpacks/python/__init__.py +++ b/repo2docker/buildpacks/python/__init__.py @@ -67,7 +67,7 @@ def _get_pip_scripts(self): scripts.append( ( "${NB_USER}", - '{} install --no-cache-dir -r "{}"'.format(pip, requirements_file), + f'{pip} install --no-cache-dir -r "{requirements_file}"', ) ) return scripts @@ -126,9 +126,7 @@ def get_assemble_scripts(self): # setup.py exists *and* binder dir is not used if not self.binder_dir and os.path.exists(setup_py): - assemble_scripts.append( - ("${NB_USER}", "{} install --no-cache-dir .".format(pip)) - ) + assemble_scripts.append(("${NB_USER}", f"{pip} install --no-cache-dir .")) return assemble_scripts def detect(self): diff --git a/repo2docker/buildpacks/r.py b/repo2docker/buildpacks/r.py index eaf44b2b5..9009a4d87 100644 --- a/repo2docker/buildpacks/r.py +++ b/repo2docker/buildpacks/r.py @@ -139,7 +139,7 @@ def detect(self): self._checkpoint_date = datetime.date.today() - datetime.timedelta( days=2 ) - self._runtime = "r-{}".format(str(self._checkpoint_date)) + self._runtime = f"r-{str(self._checkpoint_date)}" return True def get_env(self): @@ -223,7 +223,7 @@ def get_mran_snapshot_url(self, snapshot_date, max_days_prior=7): for i in range(max_days_prior): try_date = snapshot_date - datetime.timedelta(days=i) # Fall back to MRAN if packagemanager.rstudio.com doesn't have it - url = "https://mran.microsoft.com/snapshot/{}".format(try_date.isoformat()) + url = f"https://mran.microsoft.com/snapshot/{try_date.isoformat()}" r = requests.head(url) if r.ok: return url diff --git a/repo2docker/contentproviders/dataverse.py b/repo2docker/contentproviders/dataverse.py index a1c57caa9..c308c2d09 100644 --- a/repo2docker/contentproviders/dataverse.py +++ b/repo2docker/contentproviders/dataverse.py @@ -20,7 +20,7 @@ class Dataverse(DoiProvider): def __init__(self): data_file = os.path.join(os.path.dirname(__file__), "dataverse.json") - with open(data_file, "r") as fp: + with open(data_file) as fp: self.hosts = json.load(fp)["installations"] super().__init__() @@ -97,7 +97,7 @@ def fetch(self, spec, output_dir, yield_output=False): record_id = spec["record"] host = spec["host"] - yield "Fetching Dataverse record {}.\n".format(record_id) + yield f"Fetching Dataverse record {record_id}.\n" url = "{}/api/datasets/:persistentId?persistentId={}".format( host["url"], record_id ) @@ -114,8 +114,7 @@ def fetch(self, spec, output_dir, yield_output=False): file_ref = {"download": file_url, "filename": filename} fetch_map = {key: key for key in file_ref.keys()} - for line in 
self.fetch_file(file_ref, fetch_map, output_dir): - yield line + yield from self.fetch_file(file_ref, fetch_map, output_dir) new_subdirs = os.listdir(output_dir) # if there is only one new subdirectory move its contents diff --git a/repo2docker/contentproviders/doi.py b/repo2docker/contentproviders/doi.py index c1941ba83..01d048ec7 100644 --- a/repo2docker/contentproviders/doi.py +++ b/repo2docker/contentproviders/doi.py @@ -23,7 +23,7 @@ def __init__(self): self.session = Session() self.session.headers.update( { - "user-agent": "repo2docker {}".format(__version__), + "user-agent": f"repo2docker {__version__}", } ) @@ -38,7 +38,7 @@ def _urlopen(self, req, headers=None): if not isinstance(req, request.Request): req = request.Request(req) - req.add_header("User-Agent", "repo2docker {}".format(__version__)) + req.add_header("User-Agent", f"repo2docker {__version__}") if headers is not None: for key, value in headers.items(): req.add_header(key, value) @@ -52,7 +52,7 @@ def doi2url(self, doi): doi = normalize_doi(doi) try: - resp = self._request("https://doi.org/{}".format(doi)) + resp = self._request(f"https://doi.org/{doi}") resp.raise_for_status() # If the DOI doesn't resolve, just return URL except HTTPError: @@ -67,26 +67,26 @@ def fetch_file(self, file_ref, host, output_dir, unzip=False): # file related to a record file_url = deep_get(file_ref, host["download"]) fname = deep_get(file_ref, host["filename"]) - logging.debug("Downloading file {} as {}\n".format(file_url, fname)) + logging.debug(f"Downloading file {file_url} as {fname}\n") - yield "Requesting {}\n".format(file_url) + yield f"Requesting {file_url}\n" resp = self._request(file_url, stream=True) resp.raise_for_status() if path.dirname(fname): sub_dir = path.join(output_dir, path.dirname(fname)) if not path.exists(sub_dir): - yield "Creating {}\n".format(sub_dir) + yield f"Creating {sub_dir}\n" makedirs(sub_dir, exist_ok=True) dst_fname = path.join(output_dir, fname) with open(dst_fname, "wb") as dst: - yield "Fetching {}\n".format(fname) + yield f"Fetching {fname}\n" for chunk in resp.iter_content(chunk_size=None): dst.write(chunk) if unzip and is_zipfile(dst_fname): - yield "Extracting {}\n".format(fname) + yield f"Extracting {fname}\n" zfile = ZipFile(dst_fname) zfile.extractall(path=output_dir) zfile.close() @@ -106,4 +106,4 @@ def fetch_file(self, file_ref, host, output_dir, unzip=False): copytree(path.join(output_dir, d), output_dir) shutil.rmtree(path.join(output_dir, d)) - yield "Fetched files: {}\n".format(os.listdir(output_dir)) + yield f"Fetched files: {os.listdir(output_dir)}\n" diff --git a/repo2docker/contentproviders/figshare.py b/repo2docker/contentproviders/figshare.py index 4b1517fc9..5d27684d1 100644 --- a/repo2docker/contentproviders/figshare.py +++ b/repo2docker/contentproviders/figshare.py @@ -91,10 +91,9 @@ def fetch(self, spec, output_dir, yield_output=False): only_one_file = len(files) == 1 for file_ref in files: unzip = file_ref["name"].endswith(".zip") and only_one_file - for line in self.fetch_file(file_ref, host, output_dir, unzip): - yield line + yield from self.fetch_file(file_ref, host, output_dir, unzip) @property def content_id(self): """The Figshare article ID""" - return "{}.v{}".format(self.article_id, self.article_version) + return f"{self.article_id}.v{self.article_version}" diff --git a/repo2docker/contentproviders/git.py b/repo2docker/contentproviders/git.py index 345298c76..9e5fb48e7 100644 --- a/repo2docker/contentproviders/git.py +++ b/repo2docker/contentproviders/git.py @@ -29,13 
+29,12 @@ def fetch(self, spec, output_dir, yield_output=False): # this prevents HEAD's submodules to be cloned if ref doesn't have them cmd.extend(["--no-checkout"]) cmd.extend([repo, output_dir]) - for line in execute_cmd(cmd, capture=yield_output): - yield line + yield from execute_cmd(cmd, capture=yield_output) except subprocess.CalledProcessError as e: - msg = "Failed to clone repository from {repo}".format(repo=repo) + msg = f"Failed to clone repository from {repo}" if ref != "HEAD": - msg += " (ref {ref})".format(ref=ref) + msg += f" (ref {ref})" msg += "." raise ContentProviderException(msg) from e @@ -54,23 +53,21 @@ def fetch(self, spec, output_dir, yield_output=False): "specifying `--ref`." ) else: - msg = "Failed to check out ref {}".format(ref) + msg = f"Failed to check out ref {ref}" raise ValueError(msg) # We don't need to explicitly checkout things as the reset will # take care of that. If the hash is resolved above, we should be # able to reset to it - for line in execute_cmd( + yield from execute_cmd( ["git", "reset", "--hard", hash], cwd=output_dir, capture=yield_output - ): - yield line + ) # ensure that git submodules are initialised and updated - for line in execute_cmd( + yield from execute_cmd( ["git", "submodule", "update", "--init", "--recursive"], cwd=output_dir, capture=yield_output, - ): - yield line + ) cmd = ["git", "rev-parse", "HEAD"] sha1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=output_dir) diff --git a/repo2docker/contentproviders/hydroshare.py b/repo2docker/contentproviders/hydroshare.py index 1cb8b6975..3378129ab 100755 --- a/repo2docker/contentproviders/hydroshare.py +++ b/repo2docker/contentproviders/hydroshare.py @@ -61,7 +61,7 @@ def fetch(self, spec, output_dir, yield_output=False, timeout=120): bag_url = "{}{}".format(host["django_irods"], resource_id) - yield "Downloading {}.\n".format(bag_url) + yield f"Downloading {bag_url}.\n" # bag downloads are prepared on demand and may need some time conn = self.urlopen(bag_url) @@ -82,7 +82,7 @@ def fetch(self, spec, output_dir, yield_output=False, timeout=120): time.sleep(wait_time) conn = self.urlopen(bag_url) if conn.status_code != 200: - msg = "Failed to download bag. status code {}.\n".format(conn.status_code) + msg = f"Failed to download bag. status code {conn.status_code}.\n" yield msg raise ContentProviderException(msg) # Bag creation seems to need a small time buffer after it says it's ready. 
@@ -102,4 +102,4 @@ def fetch(self, spec, output_dir, yield_output=False, timeout=120): @property def content_id(self): """The HydroShare resource ID""" - return "{}.v{}".format(self.resource_id, self.version) + return f"{self.resource_id}.v{self.version}" diff --git a/repo2docker/contentproviders/mercurial.py b/repo2docker/contentproviders/mercurial.py index 60d5f2fd8..e29584f29 100644 --- a/repo2docker/contentproviders/mercurial.py +++ b/repo2docker/contentproviders/mercurial.py @@ -41,8 +41,7 @@ def fetch(self, spec, output_dir, yield_output=False): # don't update so the clone will include an empty working # directory, the given ref will be updated out later cmd.extend(["--noupdate"]) - for line in execute_cmd(cmd, capture=yield_output): - yield line + yield from execute_cmd(cmd, capture=yield_output) except subprocess.CalledProcessError as error: msg = f"Failed to clone repository from {repo}" @@ -54,17 +53,16 @@ def fetch(self, spec, output_dir, yield_output=False): # check out the specific ref given by the user if ref is not None: try: - for line in execute_cmd( + yield from execute_cmd( ["hg", "update", "--clean", ref] + args_enabling_topic, cwd=output_dir, capture=yield_output, - ): - yield line + ) except subprocess.CalledProcessError: self.log.error( "Failed to update to ref %s", ref, extra=dict(phase=R2dState.FAILED) ) - raise ValueError("Failed to update to ref {}".format(ref)) + raise ValueError(f"Failed to update to ref {ref}") cmd = ["hg", "identify", "-i"] + args_enabling_topic sha1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=output_dir) diff --git a/repo2docker/contentproviders/swhid.py b/repo2docker/contentproviders/swhid.py index e20501770..2a964e70f 100644 --- a/repo2docker/contentproviders/swhid.py +++ b/repo2docker/contentproviders/swhid.py @@ -34,12 +34,12 @@ def __init__(self): self.session = requests.Session() self.session.headers.update( { - "user-agent": "repo2docker {}".format(__version__), + "user-agent": f"repo2docker {__version__}", } ) def set_auth_token(self, token): - header = {"Authorization": "Bearer {}".format(token)} + header = {"Authorization": f"Bearer {token}"} self.session.headers.update(header) def _request(self, url, method="GET"): @@ -72,8 +72,8 @@ def detect(self, swhid, ref=None, extra_args=None): return {"swhid": swhid, "swhid_obj": swhid_dict} def fetch_directory(self, dir_hash, output_dir): - url = "{}/vault/directory/{}/".format(self.base_url, dir_hash) - yield "Fetching directory {} from {}\n".format(dir_hash, url) + url = f"{self.base_url}/vault/directory/{dir_hash}/" + yield f"Fetching directory {dir_hash} from {url}\n" resp = self._request(url, "POST") receipt = resp.json() status = receipt["status"] @@ -92,7 +92,7 @@ def fetch_directory(self, dir_hash, output_dir): # move its content one level up copytree(path.join(output_dir, dir_hash), output_dir) shutil.rmtree(path.join(output_dir, dir_hash)) - yield "Fetched files: {}\n".format(os.listdir(output_dir)) + yield f"Fetched files: {os.listdir(output_dir)}\n" def fetch(self, spec, output_dir, yield_output=False): swhid = spec["swhid"] @@ -101,12 +101,12 @@ def fetch(self, spec, output_dir, yield_output=False): if swhid_obj["type"] == "rev": # need to get the directory for this revision sha1git = swhid_obj["hash"] - url = "{}/revision/{}/".format(self.base_url, sha1git) - yield "Fetching revision {} from {}\n".format(sha1git, url) + url = f"{self.base_url}/revision/{sha1git}/" + yield f"Fetching revision {sha1git} from {url}\n" resp = self._request(url) assert resp.ok, 
(resp.content, self.session.headers) directory = resp.json()["directory"] - self.swhid = "swh:1:dir:{}".format(directory) + self.swhid = f"swh:1:dir:{directory}" yield from self.fetch_directory(directory, output_dir) elif swhid_obj["type"] == "dir": self.swhid = swhid diff --git a/repo2docker/contentproviders/zenodo.py b/repo2docker/contentproviders/zenodo.py index 7a5d09325..a58e295f8 100644 --- a/repo2docker/contentproviders/zenodo.py +++ b/repo2docker/contentproviders/zenodo.py @@ -66,7 +66,7 @@ def fetch(self, spec, output_dir, yield_output=False): record_id = spec["record"] host = spec["host"] - yield "Fetching Zenodo record {}.\n".format(record_id) + yield f"Fetching Zenodo record {record_id}.\n" resp = self.urlopen( "{}{}".format(host["api"], record_id), headers={"accept": "application/json"}, @@ -77,10 +77,7 @@ def fetch(self, spec, output_dir, yield_output=False): files = deep_get(record, host["filepath"]) only_one_file = len(files) == 1 for file_ref in files: - for line in self.fetch_file( - file_ref, host, output_dir, unzip=only_one_file - ): - yield line + yield from self.fetch_file(file_ref, host, output_dir, unzip=only_one_file) @property def content_id(self): diff --git a/repo2docker/engine.py b/repo2docker/engine.py index 9b5e92eb2..459b2be83 100644 --- a/repo2docker/engine.py +++ b/repo2docker/engine.py @@ -131,7 +131,7 @@ def config(self): return self._config def __repr__(self): - return "Image(tags={},config={})".format(self.tags, self.config) + return f"Image(tags={self.tags},config={self.config})" class ContainerEngine(LoggingConfigurable): diff --git a/tests/conftest.py b/tests/conftest.py index 19c0ca6ec..eda58223f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -212,7 +212,7 @@ def repr_failure(self, excinfo): err = excinfo.value if isinstance(err, SystemExit): cmd = "jupyter-repo2docker %s" % " ".join(map(pipes.quote, self.args)) - return "%s | exited with status=%s" % (cmd, err.code) + return f"{cmd} | exited with status={err.code}" else: return super().repr_failure(excinfo) diff --git a/tests/memlimit/dockerfile/postBuild b/tests/memlimit/dockerfile/postBuild index af9f9a3b4..4fe5360f2 100755 --- a/tests/memlimit/dockerfile/postBuild +++ b/tests/memlimit/dockerfile/postBuild @@ -20,7 +20,7 @@ with open("mem_allocate_mb") as f: mem_allocate_mb = int(f.read().strip()) size = 1024 * 1024 * mem_allocate_mb -print("trying to allocate {}MB".format(mem_allocate_mb)) +print(f"trying to allocate {mem_allocate_mb}MB") ret = libc.malloc(size) diff --git a/tests/memlimit/non-dockerfile/postBuild b/tests/memlimit/non-dockerfile/postBuild index af9f9a3b4..4fe5360f2 100755 --- a/tests/memlimit/non-dockerfile/postBuild +++ b/tests/memlimit/non-dockerfile/postBuild @@ -20,7 +20,7 @@ with open("mem_allocate_mb") as f: mem_allocate_mb = int(f.read().strip()) size = 1024 * 1024 * mem_allocate_mb -print("trying to allocate {}MB".format(mem_allocate_mb)) +print(f"trying to allocate {mem_allocate_mb}MB") ret = libc.malloc(size) diff --git a/tests/unit/contentproviders/test_dataverse.py b/tests/unit/contentproviders/test_dataverse.py index 3d912a7b9..63a584716 100644 --- a/tests/unit/contentproviders/test_dataverse.py +++ b/tests/unit/contentproviders/test_dataverse.py @@ -12,8 +12,8 @@ test_dv = Dataverse() -harvard_dv = next((_ for _ in test_dv.hosts if _["name"] == "Harvard Dataverse")) -cimmyt_dv = next((_ for _ in test_dv.hosts if _["name"] == "CIMMYT Research Data")) +harvard_dv = next(_ for _ in test_dv.hosts if _["name"] == "Harvard Dataverse") +cimmyt_dv = next(_ for 
_ in test_dv.hosts if _["name"] == "CIMMYT Research Data") test_hosts = [ ( [ @@ -153,7 +153,7 @@ def mock_filecontent(req, context): for l in dv.fetch(spec, d): output.append(l) unpacked_files = set(os.listdir(d)) - expected = set(["directory", "some-file.txt"]) + expected = {"directory", "some-file.txt"} assert expected == unpacked_files assert os.path.isfile( os.path.join(d, "directory", "subdirectory", "the-other-file.txt") diff --git a/tests/unit/contentproviders/test_doi.py b/tests/unit/contentproviders/test_doi.py index dbe391604..ed2dcb736 100644 --- a/tests/unit/contentproviders/test_doi.py +++ b/tests/unit/contentproviders/test_doi.py @@ -27,7 +27,7 @@ def test_url_headers(requests_mock): result = doi.urlopen("https://mybinder.org", headers=headers) assert "test1" in result.request.headers assert "Test2" in result.request.headers - assert result.request.headers["User-Agent"] == "repo2docker {}".format(__version__) + assert result.request.headers["User-Agent"] == f"repo2docker {__version__}" def test_unresolving_doi(): diff --git a/tests/unit/contentproviders/test_figshare.py b/tests/unit/contentproviders/test_figshare.py index 7d29fbcbc..3de67e985 100644 --- a/tests/unit/contentproviders/test_figshare.py +++ b/tests/unit/contentproviders/test_figshare.py @@ -113,8 +113,8 @@ def test_detect_not_figshare(): def figshare_archive(prefix="a_directory"): with NamedTemporaryFile(suffix=".zip") as zfile: with ZipFile(zfile.name, mode="w") as zip: - zip.writestr("{}/some-file.txt".format(prefix), "some content") - zip.writestr("{}/some-other-file.txt".format(prefix), "some more content") + zip.writestr(f"{prefix}/some-file.txt", "some content") + zip.writestr(f"{prefix}/some-other-file.txt", "some more content") yield zfile.name @@ -127,7 +127,7 @@ def test_fetch_zip(requests_mock): { "name": "afake.zip", "is_link_only": False, - "download_url": "file://{}".format(fig_path), + "download_url": f"file://{fig_path}", } ] } @@ -135,9 +135,7 @@ def test_fetch_zip(requests_mock): "https://api.figshare.com/v2/articles/123456/versions/42", json=mock_response, ) - requests_mock.get( - "file://{}".format(fig_path), content=open(fig_path, "rb").read() - ) + requests_mock.get(f"file://{fig_path}", content=open(fig_path, "rb").read()) # with patch.object(Figshare, "urlopen", new=mock_urlopen): with TemporaryDirectory() as d: @@ -146,7 +144,7 @@ def test_fetch_zip(requests_mock): output.append(l) unpacked_files = set(os.listdir(d)) - expected = set(["some-other-file.txt", "some-file.txt"]) + expected = {"some-other-file.txt", "some-file.txt"} assert expected == unpacked_files @@ -157,12 +155,12 @@ def test_fetch_data(requests_mock): "files": [ { "name": "afake.file", - "download_url": "file://{}".format(a_path), + "download_url": f"file://{a_path}", "is_link_only": False, }, { "name": "bfake.data", - "download_url": "file://{}".format(b_path), + "download_url": f"file://{b_path}", "is_link_only": False, }, {"name": "cfake.link", "is_link_only": True}, @@ -173,12 +171,8 @@ def test_fetch_data(requests_mock): "https://api.figshare.com/v2/articles/123456/versions/42", json=mock_response, ) - requests_mock.get( - "file://{}".format(a_path), content=open(a_path, "rb").read() - ) - requests_mock.get( - "file://{}".format(b_path), content=open(b_path, "rb").read() - ) + requests_mock.get(f"file://{a_path}", content=open(a_path, "rb").read()) + requests_mock.get(f"file://{b_path}", content=open(b_path, "rb").read()) with TemporaryDirectory() as d: output = [] diff --git 
a/tests/unit/contentproviders/test_hydroshare.py b/tests/unit/contentproviders/test_hydroshare.py index da80905fb..b0211c52f 100755 --- a/tests/unit/contentproviders/test_hydroshare.py +++ b/tests/unit/contentproviders/test_hydroshare.py @@ -103,8 +103,8 @@ def test_detect_hydroshare(requests_mock): def hydroshare_archive(prefix="b8f6eae9d89241cf8b5904033460af61/data/contents"): with NamedTemporaryFile(suffix=".zip") as zfile: with ZipFile(zfile.name, mode="w") as zip: - zip.writestr("{}/some-file.txt".format(prefix), "some content") - zip.writestr("{}/some-other-file.txt".format(prefix), "some more content") + zip.writestr(f"{prefix}/some-file.txt", "some content") + zip.writestr(f"{prefix}/some-other-file.txt", "some more content") yield zfile @@ -149,7 +149,7 @@ def test_fetch_bag(): output.append(l) unpacked_files = set(os.listdir(d)) - expected = set(["some-other-file.txt", "some-file.txt"]) + expected = {"some-other-file.txt", "some-file.txt"} assert expected == unpacked_files diff --git a/tests/unit/contentproviders/test_swhid.py b/tests/unit/contentproviders/test_swhid.py index 953218e3c..48c1fa31e 100644 --- a/tests/unit/contentproviders/test_swhid.py +++ b/tests/unit/contentproviders/test_swhid.py @@ -99,7 +99,7 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf): adapter.register_uri( "GET", - "mock://api/1/revision/{}/".format(NULLID), + f"mock://api/1/revision/{NULLID}/", json={ "author": {"fullname": "John Doe "}, "directory": dirhash, @@ -107,25 +107,25 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf): ) adapter.register_uri( "POST", - "mock://api/1/vault/directory/{}/".format(dirhash), + f"mock://api/1/vault/directory/{dirhash}/", json={ - "fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash), + "fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/", "status": "new", }, ) adapter.register_uri( "GET", - "mock://api/1/vault/directory/{}/".format(dirhash), + f"mock://api/1/vault/directory/{dirhash}/", [ { "json": { - "fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash), + "fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/", "status": "pending", } }, { "json": { - "fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash), + "fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/", "status": "done", } }, @@ -133,7 +133,7 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf): ) adapter.register_uri( "GET", - "mock://api/1/vault/directory/{}/raw/".format(dirhash), + f"mock://api/1/vault/directory/{dirhash}/raw/", content=tarfile_buf, ) return provider diff --git a/tests/unit/contentproviders/test_zenodo.py b/tests/unit/contentproviders/test_zenodo.py index c46cf8d94..88e143c60 100644 --- a/tests/unit/contentproviders/test_zenodo.py +++ b/tests/unit/contentproviders/test_zenodo.py @@ -82,8 +82,8 @@ def test_detect_zenodo(test_input, expected, requests_mock): def zenodo_archive(prefix="a_directory"): with NamedTemporaryFile(suffix=".zip") as zfile: with ZipFile(zfile.name, mode="w") as zip: - zip.writestr("{}/some-file.txt".format(prefix), "some content") - zip.writestr("{}/some-other-file.txt".format(prefix), "some more content") + zip.writestr(f"{prefix}/some-file.txt", "some content") + zip.writestr(f"{prefix}/some-other-file.txt", "some more content") yield zfile.name @@ -96,15 +96,13 @@ def test_fetch_software_from_github_archive(requests_mock): "files": [ { "filename": "some_dir/afake.zip", - "links": {"download": "file://{}".format(zen_path)}, + "links": {"download": f"file://{zen_path}"}, } ], "metadata": 
{"upload_type": "other"}, } requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response) - requests_mock.get( - "file://{}".format(zen_path), content=open(zen_path, "rb").read() - ) + requests_mock.get(f"file://{zen_path}", content=open(zen_path, "rb").read()) zen = Zenodo() spec = {"host": test_zen.hosts[1], "record": "1234"} @@ -115,7 +113,7 @@ def test_fetch_software_from_github_archive(requests_mock): output.append(l) unpacked_files = set(os.listdir(d)) - expected = set(["some-other-file.txt", "some-file.txt"]) + expected = {"some-other-file.txt", "some-file.txt"} assert expected == unpacked_files @@ -129,15 +127,13 @@ def test_fetch_software(requests_mock): # this is the difference to the GitHub generated one, # the ZIP file isn't in a directory "filename": "afake.zip", - "links": {"download": "file://{}".format(zen_path)}, + "links": {"download": f"file://{zen_path}"}, } ], "metadata": {"upload_type": "software"}, } requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response) - requests_mock.get( - "file://{}".format(zen_path), content=open(zen_path, "rb").read() - ) + requests_mock.get(f"file://{zen_path}", content=open(zen_path, "rb").read()) with TemporaryDirectory() as d: zen = Zenodo() @@ -147,7 +143,7 @@ def test_fetch_software(requests_mock): output.append(l) unpacked_files = set(os.listdir(d)) - expected = set(["some-other-file.txt", "some-file.txt"]) + expected = {"some-other-file.txt", "some-file.txt"} assert expected == unpacked_files @@ -159,21 +155,21 @@ def test_fetch_data(requests_mock): "files": [ { "filename": "afake.zip", - "links": {"download": "file://{}".format(a_zen_path)}, + "links": {"download": f"file://{a_zen_path}"}, }, { "filename": "bfake.zip", - "links": {"download": "file://{}".format(b_zen_path)}, + "links": {"download": f"file://{b_zen_path}"}, }, ], "metadata": {"upload_type": "data"}, } requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response) requests_mock.get( - "file://{}".format(a_zen_path), content=open(a_zen_path, "rb").read() + f"file://{a_zen_path}", content=open(a_zen_path, "rb").read() ) requests_mock.get( - "file://{}".format(b_zen_path), content=open(b_zen_path, "rb").read() + f"file://{b_zen_path}", content=open(b_zen_path, "rb").read() ) with TemporaryDirectory() as d: diff --git a/tests/unit/test_args.py b/tests/unit/test_args.py index e5fd2c1ab..404ee796c 100644 --- a/tests/unit/test_args.py +++ b/tests/unit/test_args.py @@ -13,7 +13,7 @@ def test_version(capsys): """ with pytest.raises(SystemExit): make_r2d(["--version"]) - assert capsys.readouterr().out == "{}\n".format(__version__) + assert capsys.readouterr().out == f"{__version__}\n" def test_simple(): diff --git a/tests/unit/test_connect_url.py b/tests/unit/test_connect_url.py index 681cc6808..3115a8ab3 100644 --- a/tests/unit/test_connect_url.py +++ b/tests/unit/test_connect_url.py @@ -40,8 +40,8 @@ def test_connect_url(tmpdir): app.start() container = app.start_container() - container_url = "http://{}:{}/api".format(app.hostname, app.port) - expected_url = "http://{}:{}".format(app.hostname, app.port) + container_url = f"http://{app.hostname}:{app.port}/api" + expected_url = f"http://{app.hostname}:{app.port}" # wait a bit for the container to be ready # give the container a chance to start diff --git a/tests/unit/test_env.py b/tests/unit/test_env.py index ee01b3615..fde4aed43 100644 --- a/tests/unit/test_env.py +++ b/tests/unit/test_env.py @@ -30,7 +30,7 @@ def test_env(capfd): "repo2docker", # 'key=value' are exported as is 
in docker "-e", - "FOO={}".format(ts), + f"FOO={ts}", "--env", "BAR=baz", # 'key' is exported with the currently exported value @@ -65,7 +65,7 @@ def test_env(capfd): # stderr should contain lines of output declares = [x for x in captured.err.splitlines() if x.startswith("declare")] - assert 'declare -x FOO="{}"'.format(ts) in declares + assert f'declare -x FOO="{ts}"' in declares assert 'declare -x BAR="baz"' in declares assert 'declare -x SPAM="eggs"' in declares assert "declare -x NO_SPAM" not in declares diff --git a/tests/unit/test_ports.py b/tests/unit/test_ports.py index 09944e873..90148ba41 100644 --- a/tests/unit/test_ports.py +++ b/tests/unit/test_ports.py @@ -82,13 +82,13 @@ def _cleanup(): if all_ports: port = port_mapping["8888/tcp"][0]["HostPort"] - url = "http://{}:{}".format(host, port) + url = f"http://{host}:{port}" for i in range(5): try: r = requests.get(url) r.raise_for_status() except Exception as e: - print("No response from {}: {}".format(url, e)) + print(f"No response from {url}: {e}") container.reload() assert container.status == "running" time.sleep(3) diff --git a/tests/unit/test_r.py b/tests/unit/test_r.py index f13ee7396..3d141cfc7 100644 --- a/tests/unit/test_r.py +++ b/tests/unit/test_r.py @@ -87,6 +87,7 @@ def mock_request_head(url): with patch("requests.head", side_effect=mock_request_head): r = buildpacks.RBuildPack() - assert r.get_mran_snapshot_url( - requested - ) == "https://mran.microsoft.com/snapshot/{}".format(expected.isoformat()) + assert ( + r.get_mran_snapshot_url(requested) + == f"https://mran.microsoft.com/snapshot/{expected.isoformat()}" + ) diff --git a/tests/unit/test_users.py b/tests/unit/test_users.py index 2bceb5ff9..e14eeaf13 100644 --- a/tests/unit/test_users.py +++ b/tests/unit/test_users.py @@ -35,7 +35,7 @@ def test_user(): [ "repo2docker", "-v", - "{}:/home/{}".format(tmpdir, username), + f"{tmpdir}:/home/{username}", "--user-id", userid, "--user-name", @@ -53,7 +53,7 @@ def test_user(): with open(os.path.join(tmpdir, "id")) as f: assert f.read().strip() == userid with open(os.path.join(tmpdir, "pwd")) as f: - assert f.read().strip() == "/home/{}".format(username) + assert f.read().strip() == f"/home/{username}" with open(os.path.join(tmpdir, "name")) as f: assert f.read().strip() == username with open(os.path.join(tmpdir, "name")) as f: diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index ef9a8d38f..28401e33f 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -90,7 +90,7 @@ def test_invalid_port_mapping(port_spec): with pytest.raises(ValueError) as e: utils.validate_and_generate_port_mapping([port_spec]) - assert 'Port specification "{}"'.format(port_spec) in str(e.value) + assert f'Port specification "{port_spec}"' in str(e.value) def test_deep_get(): diff --git a/tests/unit/test_volumes.py b/tests/unit/test_volumes.py index db6efba2d..9e108ec82 100644 --- a/tests/unit/test_volumes.py +++ b/tests/unit/test_volumes.py @@ -22,7 +22,7 @@ def test_volume_abspath(): [ "repo2docker", "-v", - "{}:/home/{}".format(tmpdir, username), + f"{tmpdir}:/home/{username}", "--user-id", str(os.geteuid()), "--user-name", @@ -31,7 +31,7 @@ def test_volume_abspath(): "--", "/bin/bash", "-c", - "echo -n {} > ts".format(ts), + f"echo -n {ts} > ts", ] ) @@ -61,7 +61,7 @@ def test_volume_relpath(): "--", "/bin/bash", "-c", - "echo -n {} > ts".format(ts), + f"echo -n {ts} > ts", ] ) diff --git a/versioneer.py b/versioneer.py index 2b5454051..a47088994 100644 --- a/versioneer.py +++ b/versioneer.py @@ -275,7 +275,6 
@@ """ -from __future__ import print_function try: import configparser @@ -344,7 +343,7 @@ def get_config_from_root(root): # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") parser = configparser.SafeConfigParser() - with open(setup_cfg, "r") as f: + with open(setup_cfg) as f: parser.readfp(f) VCS = parser.get("versioneer", "VCS") # mandatory @@ -404,7 +403,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= stderr=(subprocess.PIPE if hide_stderr else None), ) break - except EnvironmentError: + except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue @@ -414,7 +413,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= return None, None else: if verbose: - print("unable to find command, tried %s" % (commands,)) + print(f"unable to find command, tried {commands}") return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: @@ -429,7 +428,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= LONG_VERSION_PY[ "git" -] = ''' +] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -961,7 +960,7 @@ def git_get_keywords(versionfile_abs): # _version.py. keywords = {} try: - f = open(versionfile_abs, "r") + f = open(versionfile_abs) for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) @@ -976,7 +975,7 @@ def git_get_keywords(versionfile_abs): if mo: keywords["date"] = mo.group(1) f.close() - except EnvironmentError: + except OSError: pass return keywords @@ -1000,11 +999,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -1013,7 +1012,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". 
- tags = set([r for r in refs if re.search(r"\d", r)]) + tags = {r for r in refs if re.search(r"\d", r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -1116,7 +1115,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format( full_tag, tag_prefix, ) @@ -1166,13 +1165,13 @@ def do_vcs_install(manifest_in, versionfile_source, ipy): files.append(versioneer_file) present = False try: - f = open(".gitattributes", "r") + f = open(".gitattributes") for line in f.readlines(): if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True f.close() - except EnvironmentError: + except OSError: pass if not present: f = open(".gitattributes", "a+") @@ -1236,7 +1235,7 @@ def versions_from_file(filename): try: with open(filename) as f: contents = f.read() - except EnvironmentError: + except OSError: raise NotThisMethod("unable to read _version.py") mo = re.search( r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S @@ -1257,7 +1256,7 @@ def write_to_version_file(filename, versions): with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) - print("set %s to '%s'" % (filename, versions["version"])) + print("set {} to '{}'".format(filename, versions["version"])) def plus_or_dot(pieces): @@ -1482,7 +1481,7 @@ def get_versions(verbose=False): try: ver = versions_from_file(versionfile_abs) if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) + print(f"got version from file {versionfile_abs} {ver}") return ver except NotThisMethod: pass @@ -1755,11 +1754,7 @@ def do_setup(): root = get_root() try: cfg = get_config_from_root(root) - except ( - EnvironmentError, - configparser.NoSectionError, - configparser.NoOptionError, - ) as e: + except (OSError, configparser.NoSectionError, configparser.NoOptionError) as e: if isinstance(e, (EnvironmentError, configparser.NoSectionError)): print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: @@ -1784,9 +1779,9 @@ def do_setup(): ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") if os.path.exists(ipy): try: - with open(ipy, "r") as f: + with open(ipy) as f: old = f.read() - except EnvironmentError: + except OSError: old = "" if INIT_PY_SNIPPET not in old: print(" appending to %s" % ipy) @@ -1805,12 +1800,12 @@ def do_setup(): manifest_in = os.path.join(root, "MANIFEST.in") simple_includes = set() try: - with open(manifest_in, "r") as f: + with open(manifest_in) as f: for line in f: if line.startswith("include "): for include in line.split()[1:]: simple_includes.add(include) - except EnvironmentError: + except OSError: pass # That doesn't cover everything MANIFEST.in can do # (http://docs.python.org/2/distutils/sourcedist.html#commands), so @@ -1844,7 +1839,7 @@ def scan_setup_py(): found = set() setters = False errors = 0 - with open("setup.py", "r") as f: + with open("setup.py") as f: for line in f.readlines(): if "import versioneer" in line: found.add("import") From 15a1fb6e2d22c013523835280e19c186e605b05f Mon Sep 17 00:00:00 2001 From: Erik Sundell Date: Sun, 23 Oct 2022 19:18:37 +0200 Subject: [PATCH 3/7] refactor: manually add transitions to f-strings --- docs/source/conf.py | 6 ++-- 
repo2docker/__main__.py | 4 +-- repo2docker/_version.py | 9 +++--- repo2docker/app.py | 35 +++++++-------------- repo2docker/buildpacks/_r_base.py | 16 +++------- repo2docker/buildpacks/base.py | 7 ++--- repo2docker/buildpacks/conda/__init__.py | 36 +++++++++------------- repo2docker/buildpacks/docker.py | 4 +-- repo2docker/buildpacks/nix/__init__.py | 8 ++--- repo2docker/buildpacks/pipfile/__init__.py | 4 +-- repo2docker/buildpacks/python/__init__.py | 4 +-- repo2docker/buildpacks/r.py | 15 +++------ repo2docker/contentproviders/base.py | 4 +-- repo2docker/contentproviders/dataverse.py | 12 +++----- repo2docker/contentproviders/figshare.py | 6 ++-- repo2docker/contentproviders/git.py | 2 +- repo2docker/contentproviders/hydroshare.py | 6 ++-- repo2docker/contentproviders/mercurial.py | 2 +- repo2docker/contentproviders/zenodo.py | 2 +- repo2docker/utils.py | 18 +++++------ tests/conda/py2/verify | 2 +- tests/conftest.py | 8 ++--- tests/unit/test_connect_url.py | 4 +-- tests/unit/test_subdir.py | 2 +- tests/unit/test_users.py | 13 +++----- tests/venv/usr-bin/verify | 2 +- versioneer.py | 11 +++---- 27 files changed, 90 insertions(+), 152 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 11c156cfe..4e33931a9 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -61,12 +61,10 @@ default_python = CondaBuildPack.major_pythons["3"] -rst_prolog = """ +rst_prolog = f""" .. |default_python| replace:: **Python {default_python}** .. |default_python_version| replace:: {default_python} -""".format( - default_python=default_python -) +""" # -- Options for HTML output ------------------------------------------------- diff --git a/repo2docker/__main__.py b/repo2docker/__main__.py index c3e7ef7e5..2ca17f171 100644 --- a/repo2docker/__main__.py +++ b/repo2docker/__main__.py @@ -304,8 +304,8 @@ def make_r2d(argv=None): r2d.volumes[os.path.abspath(args.repo)] = "." else: r2d.log.error( - 'Cannot mount "{}" in editable mode ' - "as it is not a directory".format(args.repo), + f'Cannot mount "{args.repo}" in editable mode ' + "as it is not a directory", extra=dict(phase=R2dState.FAILED), ) sys.exit(1) diff --git a/repo2docker/_version.py b/repo2docker/_version.py index aca7b127e..017b26612 100644 --- a/repo2docker/_version.py +++ b/repo2docker/_version.py @@ -293,7 +293,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + pieces["error"] = f"unable to parse git-describe output: '{describe_out}'" return pieces # tag @@ -302,10 +302,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format( - full_tag, - tag_prefix, - ) + pieces[ + "error" + ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'" return pieces pieces["closest-tag"] = full_tag[len(tag_prefix) :] diff --git a/repo2docker/app.py b/repo2docker/app.py index 718b12b29..5bee81b41 100755 --- a/repo2docker/app.py +++ b/repo2docker/app.py @@ -425,9 +425,7 @@ def get_engine(self): entry = engines[self.engine] except KeyError: raise ContainerEngineException( - "Container engine '{}' not found. 
Available engines: {}".format( - self.engine, ",".join(engines.keys()) - ) + f"Container engine '{self.engine}' not found. Available engines: {','.join(engines.keys())}" ) engine_class = entry.load() return engine_class(parent=self) @@ -447,16 +445,11 @@ def fetch(self, url, ref, checkout_path): spec = cp.detect(url, ref=ref) if spec is not None: picked_content_provider = cp - self.log.info( - "Picked {cp} content " - "provider.\n".format(cp=cp.__class__.__name__) - ) + self.log.info(f"Picked {cp.__class__.__name__} content provider.\n") break if picked_content_provider is None: - self.log.error( - "No matching content provider found for " "{url}.".format(url=url) - ) + self.log.error(f"No matching content provider found for {url}.") swh_token = self.config.get("swh_token", self.swh_token) if swh_token and isinstance(picked_content_provider, contentproviders.Swhid): @@ -488,8 +481,7 @@ def json_excepthook(self, etype, evalue, traceback): Avoids non-JSON output on errors when using --json-logs """ self.log.error( - "Error during build: %s", - evalue, + f"Error during build: {evalue}", exc_info=(etype, evalue, traceback), extra=dict(phase=R2dState.FAILED), ) @@ -619,11 +611,9 @@ def start_container(self): run_cmd = [ "jupyter", "notebook", - "--ip", - "0.0.0.0", - "--port", - container_port, - f"--NotebookApp.custom_display_url=http://{host_name}:{host_port}" + "--ip=0.0.0.0", + f"--port={container_port}", + f"--NotebookApp.custom_display_url=http://{host_name}:{host_port}", "--NotebookApp.default_url=/lab", ] else: @@ -730,7 +720,7 @@ def build(self): try: docker_client = self.get_engine() except ContainerEngineException as e: - self.log.error("\nContainer engine initialization error: %s\n", e) + self.log.error(f"\nContainer engine initialization error: {e}\n") self.exit(1) # If the source to be executed is a directory, continue using the @@ -751,8 +741,7 @@ def build(self): if self.find_image(): self.log.info( - "Reusing existing image ({}), not " - "building.".format(self.output_image_spec) + f"Reusing existing image ({self.output_image_spec}), not building." 
) # no need to build, so skip to the end by `return`ing here # this will still execute the finally clause and let's us @@ -763,8 +752,7 @@ def build(self): checkout_path = os.path.join(checkout_path, self.subdir) if not os.path.isdir(checkout_path): self.log.error( - "Subdirectory %s does not exist", - self.subdir, + f"Subdirectory {self.subdir} does not exist", extra=dict(phase=R2dState.FAILED), ) raise FileNotFoundError(f"Could not find {checkout_path}") @@ -808,8 +796,7 @@ def build(self): ) self.log.info( - "Using %s builder\n", - bp.__class__.__name__, + f"Using {bp.__class__.__name__} builder\n", extra=dict(phase=R2dState.BUILDING), ) diff --git a/repo2docker/buildpacks/_r_base.py b/repo2docker/buildpacks/_r_base.py index c73273793..35a3177b5 100644 --- a/repo2docker/buildpacks/_r_base.py +++ b/repo2docker/buildpacks/_r_base.py @@ -26,7 +26,7 @@ def rstudio_base_scripts(r_version): # we should have --no-install-recommends on all our apt-get install commands, # but here it's important because these recommend r-base, # which will upgrade the installed version of R, undoing our pinned version - r""" + rf""" curl --silent --location --fail {rstudio_url} > /tmp/rstudio.deb && \ curl --silent --location --fail {shiny_server_url} > /tmp/shiny.deb && \ echo '{rstudio_sha256sum} /tmp/rstudio.deb' | sha256sum -c - && \ @@ -37,24 +37,16 @@ def rstudio_base_scripts(r_version): apt-get -qq purge && \ apt-get -qq clean && \ rm -rf /var/lib/apt/lists/* - """.format( - rstudio_url=rstudio_url, - rstudio_sha256sum=rstudio_sha256sum, - shiny_server_url=shiny_server_url, - shiny_sha256sum=shiny_sha256sum, - ), + """, ), ( "${NB_USER}", # Install jupyter-rsession-proxy - r""" + rf""" pip install --no-cache \ jupyter-rsession-proxy=={rsession_proxy_version} \ jupyter-shiny-proxy=={shiny_proxy_version} - """.format( - rsession_proxy_version=rsession_proxy_version, - shiny_proxy_version=shiny_proxy_version, - ), + """, ), ( # Not all of these locations are configurable; so we make sure diff --git a/repo2docker/buildpacks/base.py b/repo2docker/buildpacks/base.py index 16476e9d8..0758afc61 100644 --- a/repo2docker/buildpacks/base.py +++ b/repo2docker/buildpacks/base.py @@ -594,8 +594,8 @@ def _filter_tar(tar): # buildpacks/docker.py where it is duplicated if not isinstance(memory_limit, int): raise ValueError( - "The memory limit has to be specified as an" - "integer but is '{}'".format(type(memory_limit)) + "The memory limit has to be specified as an " + f"integer but is '{type(memory_limit)}'" ) limits = {} if memory_limit: @@ -647,8 +647,7 @@ def get_preassemble_scripts(self): # FIXME: Add support for specifying version numbers if not re.match(r"^[a-z0-9.+-]+", package): raise ValueError( - "Found invalid package name {} in " - "apt.txt".format(package) + f"Found invalid package name {package} in apt.txt" ) extra_apt_packages.append(package) diff --git a/repo2docker/buildpacks/conda/__init__.py b/repo2docker/buildpacks/conda/__init__.py index 662e70bfe..8c4426dc8 100644 --- a/repo2docker/buildpacks/conda/__init__.py +++ b/repo2docker/buildpacks/conda/__init__.py @@ -341,15 +341,13 @@ def get_env_scripts(self): scripts.append( ( "${NB_USER}", - r""" + rf""" TIMEFORMAT='time: %3R' \ - bash -c 'time ${{MAMBA_EXE}} env update -p {0} --file "{1}" && \ + bash -c 'time ${{MAMBA_EXE}} env update -p {env_prefix} --file "{environment_yml}" && \ time ${{MAMBA_EXE}} clean --all -f -y && \ - ${{MAMBA_EXE}} list -p {0} \ + ${{MAMBA_EXE}} list -p {env_prefix} \ ' - """.format( - env_prefix, environment_yml - ), + """, ) 
) @@ -361,36 +359,30 @@ def get_env_scripts(self): scripts.append( ( "${NB_USER}", - r""" - ${{MAMBA_EXE}} install -p {0} r-base{1} r-irkernel r-devtools -y && \ + rf""" + ${{MAMBA_EXE}} install -p {env_prefix} r-base{r_pin} r-irkernel r-devtools -y && \ ${{MAMBA_EXE}} clean --all -f -y && \ - ${{MAMBA_EXE}} list -p {0} - """.format( - env_prefix, r_pin - ), + ${{MAMBA_EXE}} list -p {env_prefix} + """, ) ) scripts += rstudio_base_scripts(self.r_version) scripts += [ ( "root", - r""" + rf""" echo auth-none=1 >> /etc/rstudio/rserver.conf && \ echo auth-minimum-user-id=0 >> /etc/rstudio/rserver.conf && \ - echo "rsession-which-r={}/bin/R" >> /etc/rstudio/rserver.conf && \ + echo "rsession-which-r={env_prefix}/bin/R" >> /etc/rstudio/rserver.conf && \ echo www-frame-origin=same >> /etc/rstudio/rserver.conf - """.format( - env_prefix - ), + """, ), ( "${NB_USER}", # Register the jupyter kernel - r""" - R --quiet -e "IRkernel::installspec(prefix='{}')" - """.format( - env_prefix - ), + rf""" + R --quiet -e "IRkernel::installspec(prefix='{env_prefix}')" + """, ), ] return scripts diff --git a/repo2docker/buildpacks/docker.py b/repo2docker/buildpacks/docker.py index e0ccfc95c..6d8bd4698 100644 --- a/repo2docker/buildpacks/docker.py +++ b/repo2docker/buildpacks/docker.py @@ -34,8 +34,8 @@ def build( # buildpacks/base.py where it is duplicated if not isinstance(memory_limit, int): raise ValueError( - "The memory limit has to be specified as an" - "integer but is '{}'".format(type(memory_limit)) + "The memory limit has to be specified as an " + f"integer but is '{type(memory_limit)}'" ) limits = {} if memory_limit: diff --git a/repo2docker/buildpacks/nix/__init__.py b/repo2docker/buildpacks/nix/__init__.py index b7ca105d7..4947b92e5 100644 --- a/repo2docker/buildpacks/nix/__init__.py +++ b/repo2docker/buildpacks/nix/__init__.py @@ -62,13 +62,11 @@ def get_assemble_scripts(self): return super().get_assemble_scripts() + [ ( "${NB_USER}", - """ + f""" nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs && \ nix-channel --update && \ - nix-shell {} - """.format( - self.binder_path("default.nix") - ), + nix-shell {self.binder_path("default.nix")} + """, ) ] diff --git a/repo2docker/buildpacks/pipfile/__init__.py b/repo2docker/buildpacks/pipfile/__init__.py index e928d01ec..f08a1beba 100644 --- a/repo2docker/buildpacks/pipfile/__init__.py +++ b/repo2docker/buildpacks/pipfile/__init__.py @@ -123,9 +123,7 @@ def get_assemble_scripts(self): assemble_scripts.append( ( "${NB_USER}", - '${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{}"'.format( - nb_requirements_file - ), + f'${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{nb_requirements_file}"', ) ) diff --git a/repo2docker/buildpacks/python/__init__.py b/repo2docker/buildpacks/python/__init__.py index 0c12f91b8..bdcaa5209 100644 --- a/repo2docker/buildpacks/python/__init__.py +++ b/repo2docker/buildpacks/python/__init__.py @@ -55,9 +55,7 @@ def _get_pip_scripts(self): "${NB_USER}", # want the $NB_PYHTON_PREFIX environment variable, not for # Python's string formatting to try and replace this - '${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{}"'.format( - nb_requirements_file - ), + f'${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{nb_requirements_file}"', ) ) diff --git a/repo2docker/buildpacks/r.py b/repo2docker/buildpacks/r.py index 9009a4d87..3156913f9 100644 --- a/repo2docker/buildpacks/r.py +++ b/repo2docker/buildpacks/r.py @@ -336,12 +336,10 @@ def get_build_scripts(self): ( "${NB_USER}", # 
Install a pinned version of devtools, IRKernel and shiny - r""" - R --quiet -e "install.packages(c('devtools', 'IRkernel', 'shiny'), repos='{devtools_cran_mirror_url}')" && \ + rf""" + R --quiet -e "install.packages(c('devtools', 'IRkernel', 'shiny'), repos='{self.get_devtools_snapshot_url()}')" && \ R --quiet -e "IRkernel::installspec(prefix='$NB_PYTHON_PREFIX')" - """.format( - devtools_cran_mirror_url=self.get_devtools_snapshot_url() - ), + """, ), ] @@ -374,8 +372,7 @@ def get_preassemble_scripts(self): "${NB_USER}", # Delete /tmp/downloaded_packages only if install.R fails, as the second # invocation of install.R might be able to reuse them - "Rscript %s && touch /tmp/.preassembled || true && rm -rf /tmp/downloaded_packages" - % installR_path, + f"Rscript {installR_path} && touch /tmp/.preassembled || true && rm -rf /tmp/downloaded_packages", ) ] @@ -392,9 +389,7 @@ def get_assemble_scripts(self): "${NB_USER}", # only run install.R if the pre-assembly failed # Delete any downloaded packages in /tmp, as they aren't reused by R - """if [ ! -f /tmp/.preassembled ]; then Rscript {}; rm -rf /tmp/downloaded_packages; fi""".format( - installR_path - ), + f"""if [ ! -f /tmp/.preassembled ]; then Rscript {installR_path}; rm -rf /tmp/downloaded_packages; fi""", ) ] diff --git a/repo2docker/contentproviders/base.py b/repo2docker/contentproviders/base.py index f4f1cbbb0..672c39733 100644 --- a/repo2docker/contentproviders/base.py +++ b/repo2docker/contentproviders/base.py @@ -68,6 +68,6 @@ def detect(self, source, ref=None, extra_args=None): def fetch(self, spec, output_dir, yield_output=False): # nothing to be done if your content is already in the output directory - msg = "Local content provider assumes {} == {}".format(spec["path"], output_dir) + msg = f'Local content provider assumes {spec["path"]} == {output_dir}' assert output_dir == spec["path"], msg - yield "Using local repo {}.\n".format(spec["path"]) + yield f'Using local repo {spec["path"]}.\n' diff --git a/repo2docker/contentproviders/dataverse.py b/repo2docker/contentproviders/dataverse.py index c308c2d09..e6e7da38c 100644 --- a/repo2docker/contentproviders/dataverse.py +++ b/repo2docker/contentproviders/dataverse.py @@ -76,9 +76,7 @@ def detect(self, doi, ref=None, extra_args=None): data = self.urlopen(search_url).json()["data"] if data["count_in_response"] != 1: self.log.debug( - "Dataverse search query failed!\n - doi: {}\n - url: {}\n - resp: {}\n".format( - doi, url, json.dump(data) - ) + f"Dataverse search query failed!\n - doi: {doi}\n - url: {url}\n - resp: {json.dump(data)}\n" ) return @@ -98,16 +96,14 @@ def fetch(self, spec, output_dir, yield_output=False): host = spec["host"] yield f"Fetching Dataverse record {record_id}.\n" - url = "{}/api/datasets/:persistentId?persistentId={}".format( - host["url"], record_id - ) + url = f'{host["url"]}/api/datasets/:persistentId?persistentId={record_id}' resp = self.urlopen(url, headers={"accept": "application/json"}) record = resp.json()["data"] for fobj in deep_get(record, "latestVersion.files"): - file_url = "{}/api/access/datafile/{}".format( - host["url"], deep_get(fobj, "dataFile.id") + file_url = ( + f'{host["url"]}/api/access/datafile/{deep_get(fobj, "dataFile.id")}' ) filename = os.path.join(fobj.get("directoryLabel", ""), fobj["label"]) diff --git a/repo2docker/contentproviders/figshare.py b/repo2docker/contentproviders/figshare.py index 5d27684d1..0735e441c 100644 --- a/repo2docker/contentproviders/figshare.py +++ b/repo2docker/contentproviders/figshare.py @@ -75,11 +75,9 
@@ def fetch(self, spec, output_dir, yield_output=False): article_version = spec["version"] host = spec["host"] - yield "Fetching Figshare article {} in version {}.\n".format( - article_id, article_version - ) + yield f"Fetching Figshare article {article_id} in version {article_version}.\n" resp = self.urlopen( - "{}{}/versions/{}".format(host["api"], article_id, article_version), + f'{host["api"]}{article_id}/versions/{article_version}', headers={"accept": "application/json"}, ) diff --git a/repo2docker/contentproviders/git.py b/repo2docker/contentproviders/git.py index 9e5fb48e7..a3d330e6c 100644 --- a/repo2docker/contentproviders/git.py +++ b/repo2docker/contentproviders/git.py @@ -43,7 +43,7 @@ def fetch(self, spec, output_dir, yield_output=False): hash = check_ref(ref, output_dir) if hash is None: self.log.error( - "Failed to check out ref %s", ref, extra=dict(phase=R2dState.FAILED) + f"Failed to check out ref {ref}", extra=dict(phase=R2dState.FAILED) ) if ref == "master": msg = ( diff --git a/repo2docker/contentproviders/hydroshare.py b/repo2docker/contentproviders/hydroshare.py index 3378129ab..886a8f81a 100755 --- a/repo2docker/contentproviders/hydroshare.py +++ b/repo2docker/contentproviders/hydroshare.py @@ -59,7 +59,7 @@ def fetch(self, spec, output_dir, yield_output=False, timeout=120): resource_id = spec["resource"] host = spec["host"] - bag_url = "{}{}".format(host["django_irods"], resource_id) + bag_url = f'{host["django_irods"]}{resource_id}' yield f"Downloading {bag_url}.\n" @@ -76,9 +76,7 @@ def fetch(self, spec, output_dir, yield_output=False, timeout=120): msg = "Bag taking too long to prepare, exiting now, try again later." yield msg raise ContentProviderException(msg) - yield "Bag is being prepared, requesting again in {} seconds.\n".format( - wait_time - ) + yield f"Bag is being prepared, requesting again in {wait_time} seconds.\n" time.sleep(wait_time) conn = self.urlopen(bag_url) if conn.status_code != 200: diff --git a/repo2docker/contentproviders/mercurial.py b/repo2docker/contentproviders/mercurial.py index e29584f29..821f55f37 100644 --- a/repo2docker/contentproviders/mercurial.py +++ b/repo2docker/contentproviders/mercurial.py @@ -60,7 +60,7 @@ def fetch(self, spec, output_dir, yield_output=False): ) except subprocess.CalledProcessError: self.log.error( - "Failed to update to ref %s", ref, extra=dict(phase=R2dState.FAILED) + f"Failed to update to ref {ref}", extra=dict(phase=R2dState.FAILED) ) raise ValueError(f"Failed to update to ref {ref}") diff --git a/repo2docker/contentproviders/zenodo.py b/repo2docker/contentproviders/zenodo.py index a58e295f8..0a8d91084 100644 --- a/repo2docker/contentproviders/zenodo.py +++ b/repo2docker/contentproviders/zenodo.py @@ -68,7 +68,7 @@ def fetch(self, spec, output_dir, yield_output=False): yield f"Fetching Zenodo record {record_id}.\n" resp = self.urlopen( - "{}{}".format(host["api"], record_id), + f'{host["api"]}{record_id}', headers={"accept": "application/json"}, ) diff --git a/repo2docker/utils.py b/repo2docker/utils.py index e8833bd84..852f21e28 100644 --- a/repo2docker/utils.py +++ b/repo2docker/utils.py @@ -136,13 +136,10 @@ def check_port(port): try: p = int(port) except ValueError as e: - raise ValueError( - 'Port specification "{}" has ' "an invalid port.".format(mapping) - ) + raise ValueError(f'Port specification "{mapping}" has an invalid port.') if not 0 < p <= 65535: raise ValueError( - 'Port specification "{}" specifies ' - "a port outside 1-65535.".format(mapping) + f'Port specification "{mapping}" 
specifies a port outside 1-65535.' ) return port @@ -152,8 +149,7 @@ def check_port_string(p): port, protocol = parts if protocol not in ("tcp", "udp"): raise ValueError( - 'Port specification "{}" has ' - "an invalid protocol.".format(mapping) + f'Port specification "{mapping}" has an invalid protocol.' ) elif len(parts) == 1: port = parts[0] @@ -310,14 +306,14 @@ def validate(self, obj, value): num = float(value[:-1]) except ValueError: raise TraitError( - "{val} is not a valid memory specification. " - "Must be an int or a string with suffix K, M, G, T".format(val=value) + f"{value} is not a valid memory specification. " + "Must be an int or a string with suffix K, M, G, T" ) suffix = value[-1] if suffix not in self.UNIT_SUFFIXES: raise TraitError( - "{val} is not a valid memory specification. " - "Must be an int or a string with suffix K, M, G, T".format(val=value) + f"{value} is not a valid memory specification. " + "Must be an int or a string with suffix K, M, G, T" ) else: return int(float(num) * self.UNIT_SUFFIXES[suffix]) diff --git a/tests/conda/py2/verify b/tests/conda/py2/verify index b251755a6..4a55686cf 100755 --- a/tests/conda/py2/verify +++ b/tests/conda/py2/verify @@ -30,4 +30,4 @@ for pkg in pkgs: assert pkg["version"].startswith("2.7.") break else: - assert False, "python not found in %s" % pkg_names + assert False, f"python not found in {pkg_names}" diff --git a/tests/conftest.py b/tests/conftest.py index eda58223f..e2914801a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -65,7 +65,7 @@ def build_noop(): container = app.start_container() port = app.port # wait a bit for the container to be ready - container_url = "http://localhost:%s/api" % port + container_url = f"http://localhost:{port}/api" # give the container a chance to start time.sleep(1) try: @@ -77,13 +77,13 @@ def build_noop(): try: info = requests.get(container_url).json() except Exception as e: - print("Error: %s" % e) + print(f"Error: {e}") time.sleep(i * 3) else: print(info) success = True break - assert success, "Notebook never started in %s" % container + assert success, f"Notebook never started in {container}" finally: # stop the container container.stop() @@ -211,7 +211,7 @@ def reportinfo(self): def repr_failure(self, excinfo): err = excinfo.value if isinstance(err, SystemExit): - cmd = "jupyter-repo2docker %s" % " ".join(map(pipes.quote, self.args)) + cmd = f'jupyter-repo2docker {" ".join(map(pipes.quote, self.args))}' return f"{cmd} | exited with status={err.code}" else: return super().repr_failure(excinfo) diff --git a/tests/unit/test_connect_url.py b/tests/unit/test_connect_url.py index 3115a8ab3..7438b8aa3 100644 --- a/tests/unit/test_connect_url.py +++ b/tests/unit/test_connect_url.py @@ -59,13 +59,13 @@ def test_connect_url(tmpdir): try: info = requests.get(container_url).json() except Exception as e: - print("Error: %s" % e) + print(f"Error: {e}") time.sleep(i * 3) else: print(info) success = True break - assert success, "Notebook never started in %s" % container + assert success, f"Notebook never started in {container}" finally: # stop the container container.stop() diff --git a/tests/unit/test_subdir.py b/tests/unit/test_subdir.py index 43d08fce3..4b0cd47ba 100644 --- a/tests/unit/test_subdir.py +++ b/tests/unit/test_subdir.py @@ -21,7 +21,7 @@ def test_subdir(run_repo2docker): run_repo2docker(argv) # check that we restored the current working directory - assert cwd == os.getcwd(), "We should be back in %s" % cwd + assert cwd == os.getcwd(), f"We should be back in {cwd}" def 
test_subdir_in_image_name(): diff --git a/tests/unit/test_users.py b/tests/unit/test_users.py index e14eeaf13..642e4f3b2 100644 --- a/tests/unit/test_users.py +++ b/tests/unit/test_users.py @@ -34,19 +34,14 @@ def test_user(): subprocess.check_call( [ "repo2docker", - "-v", - f"{tmpdir}:/home/{username}", - "--user-id", - userid, - "--user-name", - username, + f"--volume={tmpdir}:/home/{username}", + f"--user-id={userid}", + f"--user-name={username}", tmpdir, "--", "/bin/bash", "-c", - "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user".format( - ts - ), + "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user", ] ) diff --git a/tests/venv/usr-bin/verify b/tests/venv/usr-bin/verify index 4370b68ed..c6efe7823 100755 --- a/tests/venv/usr-bin/verify +++ b/tests/venv/usr-bin/verify @@ -4,4 +4,4 @@ import os assert os.path.expanduser("~/.local/bin") in os.getenv("PATH"), os.getenv("PATH") assert os.getcwd() == os.environ["REPO_DIR"] -assert "{}/.local/bin".format(os.environ["REPO_DIR"]) in os.getenv("PATH") +assert f'{os.environ["REPO_DIR"]}/.local/bin' in os.getenv("PATH") diff --git a/versioneer.py b/versioneer.py index a47088994..03a560576 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1106,7 +1106,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + pieces["error"] = f"unable to parse git-describe output: '{describe_out}'" return pieces # tag @@ -1115,10 +1115,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format( - full_tag, - tag_prefix, - ) + pieces[ + "error" + ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'" return pieces pieces["closest-tag"] = full_tag[len(tag_prefix) :] @@ -1256,7 +1255,7 @@ def write_to_version_file(filename, versions): with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) - print("set {} to '{}'".format(filename, versions["version"])) + print(f"set {filename} to '{versions['version']}'") def plus_or_dot(pieces): From 1926795c8ca3a4be418f45b0b9c2689e2ab3f396 Mon Sep 17 00:00:00 2001 From: Erik Sundell Date: Mon, 31 Oct 2022 23:29:57 +0100 Subject: [PATCH 4/7] pre-commit: enable isort --- .pre-commit-config.yaml | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 42d13f19e..8bd52ff20 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -32,14 +32,15 @@ repos: - --target-version=py38 - --target-version=py39 - --target-version=py310 + - --target-version=py311 - # # Autoformat: Python code - # - repo: https://github.com/pycqa/isort - # rev: 5.10.1 - # hooks: - # - id: isort - # args: - # - --profile=black + # Autoformat: Python code + - repo: https://github.com/pycqa/isort + rev: 5.10.1 + hooks: + - id: isort + args: + - --profile=black # # Autoformat: markdown, yaml (but not helm templates) # - repo: https://github.com/pre-commit/mirrors-prettier From 27c2b403dbd3b6bb1b31f687ca4069434301465f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 31 Oct 2022 22:32:14 +0000 Subject: [PATCH 5/7] [pre-commit.ci] auto fixes from 
pre-commit.com hooks for more information, see https://pre-commit.ci --- docs/source/conf.py | 1 - repo2docker/__main__.py | 11 +++++----- repo2docker/app.py | 14 ++++++------- repo2docker/buildpacks/__init__.py | 11 +++++----- repo2docker/buildpacks/base.py | 11 +++++----- repo2docker/buildpacks/conda/__init__.py | 4 ++-- repo2docker/buildpacks/conda/freeze.py | 7 +++---- repo2docker/buildpacks/docker.py | 2 ++ repo2docker/buildpacks/julia/julia_project.py | 2 +- repo2docker/buildpacks/julia/julia_require.py | 2 +- repo2docker/buildpacks/nix/__init__.py | 2 +- repo2docker/buildpacks/python/__init__.py | 2 +- repo2docker/buildpacks/r.py | 8 ++++---- repo2docker/contentproviders/__init__.py | 6 +++--- repo2docker/contentproviders/dataverse.py | 7 +++---- repo2docker/contentproviders/doi.py | 17 +++++++--------- repo2docker/contentproviders/figshare.py | 10 ++++------ repo2docker/contentproviders/git.py | 2 +- repo2docker/contentproviders/hydroshare.py | 9 ++++----- repo2docker/contentproviders/mercurial.py | 2 +- repo2docker/contentproviders/swhid.py | 7 +++---- repo2docker/contentproviders/zenodo.py | 10 ++++------ repo2docker/docker.py | 5 +++-- repo2docker/engine.py | 2 +- repo2docker/utils.py | 11 +++++----- setup.py | 8 +++++--- .../conda/r3.6-target-repo-dir-flag/verify.py | 2 +- tests/conftest.py | 3 +-- tests/memlimit/dockerfile/postBuild | 2 +- tests/memlimit/non-dockerfile/postBuild | 2 +- .../py2-with-server-and-kernel-req/verify | 1 - .../setup-py-explicit-in-binder-dir/setup.py | 2 +- .../setup-py-explicit-in-binder-dir/verify | 2 +- tests/pipfile/setup-py-explicit/setup.py | 2 +- tests/pipfile/setup-py-explicit/verify | 2 +- tests/pipfile/setup-py-implicit/setup.py | 2 +- tests/unit/contentproviders/test_dataverse.py | 7 +++---- tests/unit/contentproviders/test_doi.py | 14 ++++++------- tests/unit/contentproviders/test_figshare.py | 11 +++++----- tests/unit/contentproviders/test_git.py | 4 +++- .../unit/contentproviders/test_hydroshare.py | 9 ++++----- tests/unit/contentproviders/test_local.py | 2 +- tests/unit/contentproviders/test_mercurial.py | 1 - tests/unit/contentproviders/test_swhid.py | 20 +++++++++---------- tests/unit/contentproviders/test_zenodo.py | 8 ++++---- tests/unit/test_app.py | 6 +++--- tests/unit/test_args.py | 4 +++- tests/unit/test_argumentvalidation.py | 1 - tests/unit/test_buildpack.py | 3 ++- tests/unit/test_cache_from.py | 1 - tests/unit/test_clone_depth.py | 2 -- tests/unit/test_connect_url.py | 5 +++-- tests/unit/test_editable.py | 1 - tests/unit/test_env_yml.py | 2 ++ tests/unit/test_external_scripts.py | 1 + tests/unit/test_freeze.py | 3 +-- tests/unit/test_labels.py | 9 +++++---- tests/unit/test_memlimit.py | 5 +---- tests/unit/test_ports.py | 10 +++++----- tests/unit/test_r.py | 2 +- tests/unit/test_semver.py | 1 + tests/unit/test_subdir.py | 2 +- tests/unit/test_utils.py | 8 +++++--- 63 files changed, 162 insertions(+), 173 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 4e33931a9..ec8294dad 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -4,7 +4,6 @@ # import datetime - # -- Project information ----------------------------------------------------- # ref: https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information # diff --git a/repo2docker/__main__.py b/repo2docker/__main__.py index 2ca17f171..cca1616ce 100644 --- a/repo2docker/__main__.py +++ b/repo2docker/__main__.py @@ -1,14 +1,15 @@ import argparse -import sys -import os import logging +import os +import sys + +from . 
import __version__ from .app import Repo2Docker from .engine import BuildError, ImageLoadError -from . import __version__ from .utils import ( - validate_and_generate_port_mapping, - is_valid_docker_image_name, R2dState, + is_valid_docker_image_name, + validate_and_generate_port_mapping, ) diff --git a/repo2docker/app.py b/repo2docker/app.py index 5bee81b41..c62a77d63 100755 --- a/repo2docker/app.py +++ b/repo2docker/app.py @@ -7,24 +7,23 @@ python -m repo2docker https://github.com/you/your-repo """ +import getpass import json -import sys import logging import os -import entrypoints -import getpass import shutil +import sys import tempfile import time from urllib.parse import urlparse +import entrypoints import escapism from pythonjsonlogger import jsonlogger - -from traitlets import Any, Dict, Int, List, Unicode, Bool, default +from traitlets import Any, Bool, Dict, Int, List, Unicode, default from traitlets.config import Application -from . import __version__ +from . import __version__, contentproviders from .buildpacks import ( CondaBuildPack, DockerBuildPack, @@ -36,9 +35,8 @@ PythonBuildPack, RBuildPack, ) -from . import contentproviders from .engine import BuildError, ContainerEngineException, ImageLoadError -from .utils import ByteSpecification, chdir, R2dState +from .utils import ByteSpecification, R2dState, chdir class Repo2Docker(Application): diff --git a/repo2docker/buildpacks/__init__.py b/repo2docker/buildpacks/__init__.py index 10fe89a4e..7568000bd 100644 --- a/repo2docker/buildpacks/__init__.py +++ b/repo2docker/buildpacks/__init__.py @@ -1,10 +1,9 @@ -from .base import BuildPack, BaseImage -from .python import PythonBuildPack -from .pipfile import PipfileBuildPack +from .base import BaseImage, BuildPack from .conda import CondaBuildPack -from .julia import JuliaProjectTomlBuildPack -from .julia import JuliaRequireBuildPack from .docker import DockerBuildPack +from .julia import JuliaProjectTomlBuildPack, JuliaRequireBuildPack from .legacy import LegacyBinderDockerBuildPack -from .r import RBuildPack from .nix import NixBuildPack +from .pipfile import PipfileBuildPack +from .python import PythonBuildPack +from .r import RBuildPack diff --git a/repo2docker/buildpacks/base.py b/repo2docker/buildpacks/base.py index 0758afc61..e7c9b537d 100644 --- a/repo2docker/buildpacks/base.py +++ b/repo2docker/buildpacks/base.py @@ -1,14 +1,15 @@ -import textwrap -import jinja2 -import tarfile +import hashlib import io +import logging import os import re -import logging import string import sys -import hashlib +import tarfile +import textwrap + import escapism +import jinja2 # Only use syntax features supported by Docker 17.09 TEMPLATE = r""" diff --git a/repo2docker/buildpacks/conda/__init__.py b/repo2docker/buildpacks/conda/__init__.py index 8c4426dc8..99653177b 100644 --- a/repo2docker/buildpacks/conda/__init__.py +++ b/repo2docker/buildpacks/conda/__init__.py @@ -5,9 +5,9 @@ from ruamel.yaml import YAML -from ..base import BaseImage -from .._r_base import rstudio_base_scripts from ...utils import is_local_pip_requirement +from .._r_base import rstudio_base_scripts +from ..base import BaseImage # pattern for parsing conda dependency line PYTHON_REGEX = re.compile(r"python\s*=+\s*([\d\.]*)") diff --git a/repo2docker/buildpacks/conda/freeze.py b/repo2docker/buildpacks/conda/freeze.py index 84475457f..7bdb444eb 100755 --- a/repo2docker/buildpacks/conda/freeze.py +++ b/repo2docker/buildpacks/conda/freeze.py @@ -9,17 +9,16 @@ python freeze.py [3.8] """ -from argparse import ArgumentParser 
-from datetime import datetime import os import pathlib import shutil -from subprocess import check_call import sys +from argparse import ArgumentParser +from datetime import datetime +from subprocess import check_call from ruamel.yaml import YAML - HERE = pathlib.Path(os.path.dirname(os.path.abspath(__file__))) ENV_FILE = HERE / "environment.yml" diff --git a/repo2docker/buildpacks/docker.py b/repo2docker/buildpacks/docker.py index 6d8bd4698..6979b8305 100644 --- a/repo2docker/buildpacks/docker.py +++ b/repo2docker/buildpacks/docker.py @@ -1,7 +1,9 @@ """Generates a variety of Dockerfiles based on an input matrix """ import os + import docker + from .base import BuildPack diff --git a/repo2docker/buildpacks/julia/julia_project.py b/repo2docker/buildpacks/julia/julia_project.py index e6e191dc2..d61e65d13 100644 --- a/repo2docker/buildpacks/julia/julia_project.py +++ b/repo2docker/buildpacks/julia/julia_project.py @@ -6,8 +6,8 @@ import semver import toml -from ..python import PythonBuildPack from ...semver import find_semver_match +from ..python import PythonBuildPack class JuliaProjectTomlBuildPack(PythonBuildPack): diff --git a/repo2docker/buildpacks/julia/julia_require.py b/repo2docker/buildpacks/julia/julia_require.py index 075b928e0..b2acbcff6 100644 --- a/repo2docker/buildpacks/julia/julia_require.py +++ b/repo2docker/buildpacks/julia/julia_require.py @@ -2,8 +2,8 @@ import os -from ..python import PythonBuildPack from ...semver import parse_version as V +from ..python import PythonBuildPack class JuliaRequireBuildPack(PythonBuildPack): diff --git a/repo2docker/buildpacks/nix/__init__.py b/repo2docker/buildpacks/nix/__init__.py index 4947b92e5..73dd81af4 100644 --- a/repo2docker/buildpacks/nix/__init__.py +++ b/repo2docker/buildpacks/nix/__init__.py @@ -1,7 +1,7 @@ """BuildPack for nixpkgs environments""" import os -from ..base import BuildPack, BaseImage +from ..base import BaseImage, BuildPack class NixBuildPack(BaseImage): diff --git a/repo2docker/buildpacks/python/__init__.py b/repo2docker/buildpacks/python/__init__.py index bdcaa5209..7aa225642 100644 --- a/repo2docker/buildpacks/python/__init__.py +++ b/repo2docker/buildpacks/python/__init__.py @@ -1,8 +1,8 @@ """Generates Dockerfiles based on an input matrix based on Python.""" import os -from ..conda import CondaBuildPack from ...utils import is_local_pip_requirement, open_guess_encoding +from ..conda import CondaBuildPack class PythonBuildPack(CondaBuildPack): diff --git a/repo2docker/buildpacks/r.py b/repo2docker/buildpacks/r.py index 3156913f9..200f472fd 100644 --- a/repo2docker/buildpacks/r.py +++ b/repo2docker/buildpacks/r.py @@ -1,12 +1,12 @@ -import re -import os import datetime -import requests +import os +import re +import requests from ..semver import parse_version as V -from .python import PythonBuildPack from ._r_base import rstudio_base_scripts +from .python import PythonBuildPack class RBuildPack(PythonBuildPack): diff --git a/repo2docker/contentproviders/__init__.py b/repo2docker/contentproviders/__init__.py index 6398c233e..5c40476df 100755 --- a/repo2docker/contentproviders/__init__.py +++ b/repo2docker/contentproviders/__init__.py @@ -1,8 +1,8 @@ -from .git import Git from .base import Local -from .zenodo import Zenodo -from .figshare import Figshare from .dataverse import Dataverse +from .figshare import Figshare +from .git import Git from .hydroshare import Hydroshare from .mercurial import Mercurial from .swhid import Swhid +from .zenodo import Zenodo diff --git a/repo2docker/contentproviders/dataverse.py 
b/repo2docker/contentproviders/dataverse.py index e6e7da38c..1998f6bef 100644 --- a/repo2docker/contentproviders/dataverse.py +++ b/repo2docker/contentproviders/dataverse.py @@ -1,11 +1,10 @@ -import os import json +import os import shutil +from urllib.parse import parse_qs, urlparse, urlunparse -from urllib.parse import urlparse, urlunparse, parse_qs - -from .doi import DoiProvider from ..utils import copytree, deep_get +from .doi import DoiProvider class Dataverse(DoiProvider): diff --git a/repo2docker/contentproviders/doi.py b/repo2docker/contentproviders/doi.py index 01d048ec7..03fa0171c 100644 --- a/repo2docker/contentproviders/doi.py +++ b/repo2docker/contentproviders/doi.py @@ -1,18 +1,15 @@ -import os import json -import shutil import logging - -from os import makedirs -from os import path -from requests import Session, HTTPError - +import os +import shutil +from os import makedirs, path from zipfile import ZipFile, is_zipfile -from .base import ContentProvider -from ..utils import copytree, deep_get -from ..utils import normalize_doi, is_doi +from requests import HTTPError, Session + from .. import __version__ +from ..utils import copytree, deep_get, is_doi, normalize_doi +from .base import ContentProvider class DoiProvider(ContentProvider): diff --git a/repo2docker/contentproviders/figshare.py b/repo2docker/contentproviders/figshare.py index 0735e441c..6c7b26c86 100644 --- a/repo2docker/contentproviders/figshare.py +++ b/repo2docker/contentproviders/figshare.py @@ -1,16 +1,14 @@ +import json import os import re -import json import shutil - -from os import makedirs -from os import path -from urllib.request import Request +from os import makedirs, path from urllib.error import HTTPError +from urllib.request import Request from zipfile import is_zipfile -from .doi import DoiProvider from ..utils import copytree, deep_get +from .doi import DoiProvider class Figshare(DoiProvider): diff --git a/repo2docker/contentproviders/git.py b/repo2docker/contentproviders/git.py index a3d330e6c..f35e9a888 100644 --- a/repo2docker/contentproviders/git.py +++ b/repo2docker/contentproviders/git.py @@ -1,7 +1,7 @@ import subprocess +from ..utils import R2dState, check_ref, execute_cmd from .base import ContentProvider, ContentProviderException -from ..utils import execute_cmd, check_ref, R2dState class Git(ContentProvider): diff --git a/repo2docker/contentproviders/hydroshare.py b/repo2docker/contentproviders/hydroshare.py index 886a8f81a..4104d97a6 100755 --- a/repo2docker/contentproviders/hydroshare.py +++ b/repo2docker/contentproviders/hydroshare.py @@ -1,14 +1,13 @@ -import zipfile +import json import os import shutil import time -import json -from datetime import datetime, timezone, timedelta - +import zipfile +from datetime import datetime, timedelta, timezone from urllib.request import urlretrieve -from .doi import DoiProvider from .base import ContentProviderException +from .doi import DoiProvider class Hydroshare(DoiProvider): diff --git a/repo2docker/contentproviders/mercurial.py b/repo2docker/contentproviders/mercurial.py index 821f55f37..e427cc022 100644 --- a/repo2docker/contentproviders/mercurial.py +++ b/repo2docker/contentproviders/mercurial.py @@ -1,7 +1,7 @@ import subprocess +from ..utils import R2dState, execute_cmd from .base import ContentProvider, ContentProviderException -from ..utils import execute_cmd, R2dState args_enabling_topic = ["--config", "extensions.topic="] diff --git a/repo2docker/contentproviders/swhid.py b/repo2docker/contentproviders/swhid.py index 
2a964e70f..18dc78a91 100644 --- a/repo2docker/contentproviders/swhid.py +++ b/repo2docker/contentproviders/swhid.py @@ -1,17 +1,16 @@ import io import os +import re import shutil import tarfile import time -import re - from os import path import requests -from .base import ContentProvider -from ..utils import copytree from .. import __version__ +from ..utils import copytree +from .base import ContentProvider def parse_swhid(swhid): diff --git a/repo2docker/contentproviders/zenodo.py b/repo2docker/contentproviders/zenodo.py index 0a8d91084..5d02f723b 100644 --- a/repo2docker/contentproviders/zenodo.py +++ b/repo2docker/contentproviders/zenodo.py @@ -1,14 +1,12 @@ -import os import json +import os import shutil - -from os import makedirs -from os import path -from urllib.request import Request +from os import makedirs, path from urllib.error import HTTPError +from urllib.request import Request -from .doi import DoiProvider from ..utils import copytree, deep_get +from .doi import DoiProvider class Zenodo(DoiProvider): diff --git a/repo2docker/docker.py b/repo2docker/docker.py index 686a70db5..267c16cf6 100644 --- a/repo2docker/docker.py +++ b/repo2docker/docker.py @@ -2,9 +2,10 @@ Docker container engine for repo2docker """ -import docker -from traitlets import Dict from iso8601 import parse_date +from traitlets import Dict + +import docker from .engine import Container, ContainerEngine, ContainerEngineException, Image diff --git a/repo2docker/engine.py b/repo2docker/engine.py index 459b2be83..73276a3a7 100644 --- a/repo2docker/engine.py +++ b/repo2docker/engine.py @@ -3,8 +3,8 @@ """ from abc import ABC, abstractmethod -from traitlets.config import LoggingConfigurable +from traitlets.config import LoggingConfigurable # Based on https://docker-py.readthedocs.io/en/4.2.0/containers.html diff --git a/repo2docker/utils.py b/repo2docker/utils.py index 852f21e28..cd345f4a0 100644 --- a/repo2docker/utils.py +++ b/repo2docker/utils.py @@ -1,13 +1,12 @@ -from contextlib import contextmanager -from enum import Enum -from functools import partial import os import re import subprocess -import chardet - -from shutil import copystat, copy2 +from contextlib import contextmanager +from enum import Enum +from functools import partial +from shutil import copy2, copystat +import chardet from traitlets import Integer, TraitError diff --git a/setup.py b/setup.py index e4eb16a0f..58abff7c0 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,8 @@ -from distutils.cmd import Command -from setuptools import setup, find_packages import sys +from distutils.cmd import Command + +from setuptools import find_packages, setup + import versioneer if sys.version_info[0] < 3: @@ -23,8 +25,8 @@ def finalize_options(self): pass def run(self): - from urllib.request import urlopen import json + from urllib.request import urlopen resp = urlopen(self.url, timeout=5) resp_body = resp.read() diff --git a/tests/conda/r3.6-target-repo-dir-flag/verify.py b/tests/conda/r3.6-target-repo-dir-flag/verify.py index d284c28a6..701278364 100755 --- a/tests/conda/r3.6-target-repo-dir-flag/verify.py +++ b/tests/conda/r3.6-target-repo-dir-flag/verify.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -import sys import os +import sys from glob import glob # conda should still be in /srv/conda diff --git a/tests/conftest.py b/tests/conftest.py index e2914801a..13e62b401 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -19,14 +19,13 @@ import os import pipes import shlex -import requests import subprocess import time from tempfile import 
TemporaryDirectory - import escapism import pytest +import requests import yaml from repo2docker.__main__ import make_r2d diff --git a/tests/memlimit/dockerfile/postBuild b/tests/memlimit/dockerfile/postBuild index 4fe5360f2..1d7819241 100755 --- a/tests/memlimit/dockerfile/postBuild +++ b/tests/memlimit/dockerfile/postBuild @@ -10,8 +10,8 @@ NOTE: This file has to be duplicated & present in all the following locations: - tests/memlimit/dockerfile/postBuild See https://github.com/jupyterhub/repo2docker/issues/160 for reason """ -from ctypes import cdll, c_void_p, memset import os +from ctypes import c_void_p, cdll, memset libc = cdll.LoadLibrary("libc.so.6") libc.malloc.restype = c_void_p diff --git a/tests/memlimit/non-dockerfile/postBuild b/tests/memlimit/non-dockerfile/postBuild index 4fe5360f2..1d7819241 100755 --- a/tests/memlimit/non-dockerfile/postBuild +++ b/tests/memlimit/non-dockerfile/postBuild @@ -10,8 +10,8 @@ NOTE: This file has to be duplicated & present in all the following locations: - tests/memlimit/dockerfile/postBuild See https://github.com/jupyterhub/repo2docker/issues/160 for reason """ -from ctypes import cdll, c_void_p, memset import os +from ctypes import c_void_p, cdll, memset libc = cdll.LoadLibrary("libc.so.6") libc.malloc.restype = c_void_p diff --git a/tests/pipfile/py2-with-server-and-kernel-req/verify b/tests/pipfile/py2-with-server-and-kernel-req/verify index 7246be615..5ff8a889c 100755 --- a/tests/pipfile/py2-with-server-and-kernel-req/verify +++ b/tests/pipfile/py2-with-server-and-kernel-req/verify @@ -2,7 +2,6 @@ import os import sys - # Verify - kernel's Python: use Python 2 print(sys.version_info) assert sys.version_info[:2] == (2, 7) diff --git a/tests/pipfile/setup-py-explicit-in-binder-dir/setup.py b/tests/pipfile/setup-py-explicit-in-binder-dir/setup.py index c67a722a7..ca077f415 100644 --- a/tests/pipfile/setup-py-explicit-in-binder-dir/setup.py +++ b/tests/pipfile/setup-py-explicit-in-binder-dir/setup.py @@ -1,4 +1,4 @@ -from setuptools import setup, find_packages +from setuptools import find_packages, setup setup( name="Dummy", diff --git a/tests/pipfile/setup-py-explicit-in-binder-dir/verify b/tests/pipfile/setup-py-explicit-in-binder-dir/verify index 4c6eaf1e0..80f73003f 100755 --- a/tests/pipfile/setup-py-explicit-in-binder-dir/verify +++ b/tests/pipfile/setup-py-explicit-in-binder-dir/verify @@ -1,6 +1,6 @@ #!/usr/bin/env python -import there import dummy # This package should be available, as it was a dependency for dummy import pypi_pkg_test +import there diff --git a/tests/pipfile/setup-py-explicit/setup.py b/tests/pipfile/setup-py-explicit/setup.py index c67a722a7..ca077f415 100644 --- a/tests/pipfile/setup-py-explicit/setup.py +++ b/tests/pipfile/setup-py-explicit/setup.py @@ -1,4 +1,4 @@ -from setuptools import setup, find_packages +from setuptools import find_packages, setup setup( name="Dummy", diff --git a/tests/pipfile/setup-py-explicit/verify b/tests/pipfile/setup-py-explicit/verify index 4c6eaf1e0..80f73003f 100755 --- a/tests/pipfile/setup-py-explicit/verify +++ b/tests/pipfile/setup-py-explicit/verify @@ -1,6 +1,6 @@ #!/usr/bin/env python -import there import dummy # This package should be available, as it was a dependency for dummy import pypi_pkg_test +import there diff --git a/tests/pipfile/setup-py-implicit/setup.py b/tests/pipfile/setup-py-implicit/setup.py index c67a722a7..ca077f415 100644 --- a/tests/pipfile/setup-py-implicit/setup.py +++ b/tests/pipfile/setup-py-implicit/setup.py @@ -1,4 +1,4 @@ -from setuptools import 
setup, find_packages +from setuptools import find_packages, setup setup( name="Dummy", diff --git a/tests/unit/contentproviders/test_dataverse.py b/tests/unit/contentproviders/test_dataverse.py index 63a584716..309a22d4a 100644 --- a/tests/unit/contentproviders/test_dataverse.py +++ b/tests/unit/contentproviders/test_dataverse.py @@ -1,15 +1,14 @@ import json import os -import pytest import re - from io import BytesIO from tempfile import TemporaryDirectory from unittest.mock import patch -from urllib.request import urlopen, Request +from urllib.request import Request, urlopen -from repo2docker.contentproviders import Dataverse +import pytest +from repo2docker.contentproviders import Dataverse test_dv = Dataverse() harvard_dv = next(_ for _ in test_dv.hosts if _["name"] == "Harvard Dataverse") diff --git a/tests/unit/contentproviders/test_doi.py b/tests/unit/contentproviders/test_doi.py index ed2dcb736..dab3d7fcd 100644 --- a/tests/unit/contentproviders/test_doi.py +++ b/tests/unit/contentproviders/test_doi.py @@ -1,17 +1,17 @@ import json +import logging import os import re -import urllib -import pytest import tempfile -import logging - -from unittest.mock import patch, MagicMock, mock_open +import urllib +from unittest.mock import MagicMock, mock_open, patch from zipfile import ZipFile -from repo2docker.contentproviders.doi import DoiProvider -from repo2docker.contentproviders.base import ContentProviderException +import pytest + from repo2docker import __version__ +from repo2docker.contentproviders.base import ContentProviderException +from repo2docker.contentproviders.doi import DoiProvider def test_content_id(): diff --git a/tests/unit/contentproviders/test_figshare.py b/tests/unit/contentproviders/test_figshare.py index 3de67e985..b4f42de31 100644 --- a/tests/unit/contentproviders/test_figshare.py +++ b/tests/unit/contentproviders/test_figshare.py @@ -1,18 +1,17 @@ import json import os import re -import pytest - from contextlib import contextmanager from io import BytesIO -from tempfile import TemporaryDirectory, NamedTemporaryFile +from tempfile import NamedTemporaryFile, TemporaryDirectory from unittest.mock import patch -from urllib.request import urlopen, Request +from urllib.request import Request, urlopen from zipfile import ZipFile -from repo2docker.contentproviders import Figshare -from repo2docker.__main__ import make_r2d +import pytest +from repo2docker.__main__ import make_r2d +from repo2docker.contentproviders import Figshare test_content_ids = [ ("https://figshare.com/articles/title/9782777", "9782777.v1"), diff --git a/tests/unit/contentproviders/test_git.py b/tests/unit/contentproviders/test_git.py index 65779bf6d..fd63f9689 100644 --- a/tests/unit/contentproviders/test_git.py +++ b/tests/unit/contentproviders/test_git.py @@ -1,7 +1,9 @@ import os -import pytest import subprocess from tempfile import TemporaryDirectory + +import pytest + from repo2docker.contentproviders import Git diff --git a/tests/unit/contentproviders/test_hydroshare.py b/tests/unit/contentproviders/test_hydroshare.py index b0211c52f..cec541f6f 100755 --- a/tests/unit/contentproviders/test_hydroshare.py +++ b/tests/unit/contentproviders/test_hydroshare.py @@ -1,16 +1,15 @@ import os -import pytest - +import re from contextlib import contextmanager -from tempfile import TemporaryDirectory, NamedTemporaryFile +from tempfile import NamedTemporaryFile, TemporaryDirectory from unittest.mock import patch from zipfile import ZipFile -import re + +import pytest from repo2docker.contentproviders import 
Hydroshare from repo2docker.contentproviders.base import ContentProviderException - doi_responses = { "https://doi.org/10.4211/hs.b8f6eae9d89241cf8b5904033460af61": ( "https://www.hydroshare.org/resource/b8f6eae9d89241cf8b5904033460af61" diff --git a/tests/unit/contentproviders/test_local.py b/tests/unit/contentproviders/test_local.py index fcb2a6a87..ef288aef7 100644 --- a/tests/unit/contentproviders/test_local.py +++ b/tests/unit/contentproviders/test_local.py @@ -1,5 +1,5 @@ import os -from tempfile import TemporaryDirectory, NamedTemporaryFile +from tempfile import NamedTemporaryFile, TemporaryDirectory from repo2docker.contentproviders import Local diff --git a/tests/unit/contentproviders/test_mercurial.py b/tests/unit/contentproviders/test_mercurial.py index 4619ef862..0acb91d1e 100644 --- a/tests/unit/contentproviders/test_mercurial.py +++ b/tests/unit/contentproviders/test_mercurial.py @@ -8,7 +8,6 @@ from repo2docker.contentproviders import Mercurial from repo2docker.contentproviders.mercurial import args_enabling_topic - SKIP_HG = os.environ.get("REPO2DOCKER_SKIP_HG_TESTS", "").lower() not in { "", "0", diff --git a/tests/unit/contentproviders/test_swhid.py b/tests/unit/contentproviders/test_swhid.py index 48c1fa31e..56fee1dfc 100644 --- a/tests/unit/contentproviders/test_swhid.py +++ b/tests/unit/contentproviders/test_swhid.py @@ -1,22 +1,22 @@ +import io import json +import logging import os -import io -import tarfile -import shutil import re -import urllib -import pytest +import shutil +import tarfile import tempfile -import logging -import requests_mock - +import urllib from os import makedirs from os.path import join -from unittest.mock import patch, MagicMock, mock_open +from unittest.mock import MagicMock, mock_open, patch from zipfile import ZipFile -from repo2docker.contentproviders.swhid import Swhid, parse_swhid +import pytest +import requests_mock + from repo2docker.contentproviders.base import ContentProviderException +from repo2docker.contentproviders.swhid import Swhid, parse_swhid # this is a slightly stripped down copy of swh.model.cli.swhid_of_dir(). 
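The import reshuffling in the test-file hunks above and below is isort applying its default section ordering under `--profile=black` (the hook enabled earlier in this series): standard-library imports first, then third-party packages, then the project's own modules, with each group alphabetized and separated by a single blank line. A minimal sketch of that layout — the module below is hypothetical and not taken from any patched file:

    # Hypothetical example module (not from this patch series) showing the
    # layout isort produces with --profile=black: standard library, then
    # third-party, then first-party imports, one blank line between groups,
    # names alphabetized within each group.
    import json
    import os
    from tempfile import TemporaryDirectory

    import pytest
    import requests

    from repo2docker import __version__
    from repo2docker.contentproviders import Zenodo
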
diff --git a/tests/unit/contentproviders/test_zenodo.py b/tests/unit/contentproviders/test_zenodo.py index 88e143c60..0755163da 100644 --- a/tests/unit/contentproviders/test_zenodo.py +++ b/tests/unit/contentproviders/test_zenodo.py @@ -1,15 +1,15 @@ import json import os -import pytest import re - from contextlib import contextmanager from io import BytesIO -from tempfile import TemporaryDirectory, NamedTemporaryFile +from tempfile import NamedTemporaryFile, TemporaryDirectory from unittest.mock import patch -from urllib.request import urlopen, Request +from urllib.request import Request, urlopen from zipfile import ZipFile +import pytest + from repo2docker.contentproviders import Zenodo doi_responses = { diff --git a/tests/unit/test_app.py b/tests/unit/test_app.py index 25b39cdb1..d37103e60 100644 --- a/tests/unit/test_app.py +++ b/tests/unit/test_app.py @@ -1,13 +1,13 @@ import errno -import pytest from tempfile import TemporaryDirectory from unittest.mock import patch -import docker import escapism +import pytest -from repo2docker.app import Repo2Docker +import docker from repo2docker.__main__ import make_r2d +from repo2docker.app import Repo2Docker from repo2docker.utils import chdir diff --git a/tests/unit/test_args.py b/tests/unit/test_args.py index 404ee796c..975604ea4 100644 --- a/tests/unit/test_args.py +++ b/tests/unit/test_args.py @@ -2,9 +2,11 @@ Test argument parsing and r2d construction """ import os + import pytest -from repo2docker.__main__ import make_r2d + from repo2docker import __version__ +from repo2docker.__main__ import make_r2d def test_version(capsys): diff --git a/tests/unit/test_argumentvalidation.py b/tests/unit/test_argumentvalidation.py index d2475b89f..33e7d69af 100644 --- a/tests/unit/test_argumentvalidation.py +++ b/tests/unit/test_argumentvalidation.py @@ -7,7 +7,6 @@ import pytest - here = os.path.dirname(os.path.abspath(__file__)) test_dir = os.path.dirname(here) docker_simple = os.path.join(test_dir, "dockerfile", "simple") diff --git a/tests/unit/test_buildpack.py b/tests/unit/test_buildpack.py index eda7bb924..6a8147e30 100644 --- a/tests/unit/test_buildpack.py +++ b/tests/unit/test_buildpack.py @@ -1,7 +1,8 @@ from os.path import join as pjoin +from tempfile import TemporaryDirectory import pytest -from tempfile import TemporaryDirectory + from repo2docker.buildpacks import LegacyBinderDockerBuildPack, PythonBuildPack from repo2docker.utils import chdir diff --git a/tests/unit/test_cache_from.py b/tests/unit/test_cache_from.py index 77797b128..fd186dbc5 100644 --- a/tests/unit/test_cache_from.py +++ b/tests/unit/test_cache_from.py @@ -5,7 +5,6 @@ from unittest.mock import MagicMock import docker - from repo2docker.buildpacks import ( BaseImage, DockerBuildPack, diff --git a/tests/unit/test_clone_depth.py b/tests/unit/test_clone_depth.py index 7989b1175..55102eac1 100644 --- a/tests/unit/test_clone_depth.py +++ b/tests/unit/test_clone_depth.py @@ -8,12 +8,10 @@ """ import os import subprocess - from tempfile import TemporaryDirectory from repo2docker.app import Repo2Docker - URL = "https://github.com/binderhub-ci-repos/repo2docker-ci-clone-depth" diff --git a/tests/unit/test_connect_url.py b/tests/unit/test_connect_url.py index 7438b8aa3..dbc0c2608 100644 --- a/tests/unit/test_connect_url.py +++ b/tests/unit/test_connect_url.py @@ -1,10 +1,11 @@ """ Test if the explict hostname is supplied correctly to the container """ -import requests import time -from repo2docker.app import Repo2Docker +import requests + +from repo2docker.app import Repo2Docker # 
Minimal Dockerfile to make build as fast as possible DOCKER_FILE = """ diff --git a/tests/unit/test_editable.py b/tests/unit/test_editable.py index 6947b7d21..de4d6261e 100644 --- a/tests/unit/test_editable.py +++ b/tests/unit/test_editable.py @@ -5,7 +5,6 @@ from repo2docker.__main__ import make_r2d - DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "dockerfile", "editable") diff --git a/tests/unit/test_env_yml.py b/tests/unit/test_env_yml.py index c744ccf3a..dff142288 100644 --- a/tests/unit/test_env_yml.py +++ b/tests/unit/test_env_yml.py @@ -3,7 +3,9 @@ """ import os import sys + import pytest + from repo2docker import buildpacks diff --git a/tests/unit/test_external_scripts.py b/tests/unit/test_external_scripts.py index d03debe55..278707c87 100644 --- a/tests/unit/test_external_scripts.py +++ b/tests/unit/test_external_scripts.py @@ -1,5 +1,6 @@ """Test if assemble scripts from outside of r2d repo are accepted.""" import time + from repo2docker.app import Repo2Docker from repo2docker.buildpacks import PythonBuildPack diff --git a/tests/unit/test_freeze.py b/tests/unit/test_freeze.py index 66b0c3bbd..ddcd29f3e 100644 --- a/tests/unit/test_freeze.py +++ b/tests/unit/test_freeze.py @@ -2,12 +2,11 @@ from tempfile import TemporaryDirectory from unittest.mock import patch +import pytest from ruamel.yaml import YAML from repo2docker.buildpacks.conda.freeze import set_python -import pytest - V = "3.7" yaml = YAML(typ="rt") diff --git a/tests/unit/test_labels.py b/tests/unit/test_labels.py index c13cff4bf..5520de4d1 100644 --- a/tests/unit/test_labels.py +++ b/tests/unit/test_labels.py @@ -1,12 +1,13 @@ """ Test if labels are supplied correctly to the container """ -from repo2docker.app import Repo2Docker -from repo2docker.buildpacks import BuildPack -from repo2docker import __version__ -import pytest from unittest.mock import Mock +import pytest + +from repo2docker import __version__ +from repo2docker.app import Repo2Docker +from repo2docker.buildpacks import BuildPack URL = "https://github.com/binderhub-ci-repos/repo2docker-ci-clone-depth" diff --git a/tests/unit/test_memlimit.py b/tests/unit/test_memlimit.py index 1e708d4fc..ee2f2b356 100644 --- a/tests/unit/test_memlimit.py +++ b/tests/unit/test_memlimit.py @@ -3,16 +3,13 @@ """ import os - from unittest.mock import MagicMock -import docker - import pytest +import docker from repo2docker.buildpacks import BaseImage, DockerBuildPack - basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) diff --git a/tests/unit/test_ports.py b/tests/unit/test_ports.py index 90148ba41..aaebcdab1 100644 --- a/tests/unit/test_ports.py +++ b/tests/unit/test_ports.py @@ -2,18 +2,18 @@ Test Port mappings work on running non-jupyter workflows """ -import requests -import time import os -import tempfile import random +import tempfile +import time from getpass import getuser -import docker import pytest +import requests -from repo2docker.app import Repo2Docker +import docker from repo2docker.__main__ import make_r2d +from repo2docker.app import Repo2Docker def read_port_mapping_response( diff --git a/tests/unit/test_r.py b/tests/unit/test_r.py index 3d141cfc7..a5d7db7c0 100644 --- a/tests/unit/test_r.py +++ b/tests/unit/test_r.py @@ -1,8 +1,8 @@ from datetime import date +from unittest.mock import patch import pytest from requests.models import Response -from unittest.mock import patch from repo2docker import buildpacks diff --git a/tests/unit/test_semver.py b/tests/unit/test_semver.py index bbbf8ca50..b07272be1 100644 --- 
a/tests/unit/test_semver.py +++ b/tests/unit/test_semver.py @@ -1,5 +1,6 @@ import pytest from semver import VersionInfo + from repo2docker import semver diff --git a/tests/unit/test_subdir.py b/tests/unit/test_subdir.py index 4b0cd47ba..4e7007311 100644 --- a/tests/unit/test_subdir.py +++ b/tests/unit/test_subdir.py @@ -4,8 +4,8 @@ import os import escapism - import pytest + from repo2docker.app import Repo2Docker TEST_REPO = "https://github.com/binderhub-ci-repos/repo2docker-subdir-support" diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 28401e33f..cec3dc8cd 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -1,13 +1,15 @@ """ Tests for repo2docker/utils.py """ -import traitlets import os -from repo2docker import utils -import pytest import subprocess import tempfile +import pytest +import traitlets + +from repo2docker import utils + def test_capture_cmd_no_capture_success(): # This should succeed From 94bd21e16e32fd868b2d06476b18513a69bcd9e3 Mon Sep 17 00:00:00 2001 From: Erik Sundell Date: Mon, 31 Oct 2022 23:40:34 +0100 Subject: [PATCH 6/7] pre-commit: enable prettier for markdown --- .pre-commit-config.yaml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8bd52ff20..842120dae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,8 +42,9 @@ repos: args: - --profile=black - # # Autoformat: markdown, yaml (but not helm templates) - # - repo: https://github.com/pre-commit/mirrors-prettier - # rev: v2.7.1 - # hooks: - # - id: prettier + # Autoformat: markdown + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.7.1 + hooks: + - id: prettier + files: ".md" From ae3aeb6dc5dd6deee6d663b7088eb74d225a815f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 31 Oct 2022 22:40:52 +0000 Subject: [PATCH 7/7] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .github/ISSUE_TEMPLATE/bug_report.md | 20 ++++--- .github/ISSUE_TEMPLATE/feature_request.md | 18 +++--- .github/ISSUE_TEMPLATE/support-question.md | 7 +-- CONTRIBUTING.md | 23 ++++---- README.md | 7 +-- docs/source/architecture.md | 14 ++--- docs/source/contributing/contributing.md | 66 +++++++++++----------- docs/source/contributing/roadmap.md | 47 ++++++++------- docs/source/contributing/tasks.md | 39 ++++++------- docs/source/design.md | 5 +- tests/conda/README.md | 2 +- 11 files changed, 125 insertions(+), 123 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 2171bd672..548df5785 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,23 +1,27 @@ --- name: Bug report about: Create a report to help us repair something that is currently broken -title: '' -labels: '' -assignees: '' - +title: "" +labels: "" +assignees: "" --- + ### Bug description + #### Expected behaviour + #### Actual behaviour + ### How to reproduce + 1. Go to '...' @@ -26,9 +30,9 @@ assignees: '' 4. See error ### Your personal set up - - - OS: [e.g. linux, OSX] - - Docker version: `docker version` - - repo2docker version `repo2docker --version` + +- OS: [e.g. 
linux, OSX] +- Docker version: `docker version` +- repo2docker version `repo2docker --version` diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 833ba6e5f..a0bcd5a2f 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,29 +1,29 @@ --- name: Feature request about: Suggest a new feature or a big change to repo2docker -title: '' -labels: 'needs: discussion' -assignees: '' - +title: "" +labels: "needs: discussion" +assignees: "" --- + ### Proposed change - + ### Alternative options - + ### Who would use this feature? - + ### How much effort will adding it take? - + ### Who can do this work? - + diff --git a/.github/ISSUE_TEMPLATE/support-question.md b/.github/ISSUE_TEMPLATE/support-question.md index bf10a69a4..e7b543e50 100644 --- a/.github/ISSUE_TEMPLATE/support-question.md +++ b/.github/ISSUE_TEMPLATE/support-question.md @@ -1,10 +1,9 @@ --- name: Support question about: Ask a question about using repo2docker -title: '' -labels: '' -assignees: '' - +title: "" +labels: "" +assignees: "" --- 🚨 Please do **not** open an issue for support questions. Instead please search for similar issues or post on http://discourse.jupyter.org/c/questions. 🚨 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f1a583d59..1dfc19576 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,19 +8,20 @@ The repo2docker developer documentation is all rendered on our documentation web If you're here, you're probably looking for the [Contributing to repo2docker development](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html) page. Please make sure you've read the following sections before opening an issue/pull request: -* [Process for making a contribution](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#process-for-making-a-contribution). - * These steps talk you through choosing the right issue template (bug report or feature request) and making a change. -* [Guidelines to getting a Pull Request merged](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#guidelines-to-getting-a-pull-request-merged). - * These are tips and tricks to help make your contribution as smooth as possible for you and for the repo2docker maintenance team. + +- [Process for making a contribution](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#process-for-making-a-contribution). + - These steps talk you through choosing the right issue template (bug report or feature request) and making a change. +- [Guidelines to getting a Pull Request merged](https://repo2docker.readthedocs.io/en/latest/contributing/contributing.html#guidelines-to-getting-a-pull-request-merged). + - These are tips and tricks to help make your contribution as smooth as possible for you and for the repo2docker maintenance team. There are a few other pages to highlight: -* [Our roadmap](https://repo2docker.readthedocs.io/en/latest/contributing/roadmap.html) - * We use the roadmap to develop a shared understanding of the project's vision and direction amongst the community of users, contributors, and maintainers. +- [Our roadmap](https://repo2docker.readthedocs.io/en/latest/contributing/roadmap.html) + - We use the roadmap to develop a shared understanding of the project's vision and direction amongst the community of users, contributors, and maintainers. 
This is a great place to get a feel for what the maintainers are thinking about for the short, medium, and long term future of the project. -* [Design of repo2docker](https://repo2docker.readthedocs.io/en/latest/design.html) - * This page explains some of the design principles behind repo2docker. +- [Design of repo2docker](https://repo2docker.readthedocs.io/en/latest/design.html) + - This page explains some of the design principles behind repo2docker. Its a good place to understand _why_ the team have made the decisions that they have along the way! - * We absolutely encourage discussion around refactoring, updating or extending repo2docker, but please make sure that you've understood this page before opening an issue to discuss the change you'd like to propose. -* [Common developer tasks and how-tos](https://repo2docker.readthedocs.io/en/latest/contributing/tasks.html) - * Some notes on running tests, buildpack dependencies, creating a release, and keeping the pip files up to date. + - We absolutely encourage discussion around refactoring, updating or extending repo2docker, but please make sure that you've understood this page before opening an issue to discuss the change you'd like to propose. +- [Common developer tasks and how-tos](https://repo2docker.readthedocs.io/en/latest/contributing/tasks.html) + - Some notes on running tests, buildpack dependencies, creating a release, and keeping the pip files up to date. diff --git a/README.md b/README.md index 84949e84f..01d60eebc 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,7 @@ For more information, please visit --- ## Using repo2docker + ### Prerequisites 1. Docker to build & run the repositories. The [community edition](https://store.docker.com/search?type=edition&offering=community) @@ -83,21 +84,19 @@ something like: If you copy paste that URL into your browser you will see a Jupyter Notebook with the contents of the repository you had just built! -For more information on how to use ``repo2docker``, see the +For more information on how to use `repo2docker`, see the [usage guide](http://repo2docker.readthedocs.io/en/latest/usage.html). - ## Repository specifications Repo2Docker looks for configuration files in the source repository to determine how the Docker image should be built. For a list of the configuration -files that ``repo2docker`` can use, see the +files that `repo2docker` can use, see the [complete list of configuration files](https://repo2docker.readthedocs.io/en/latest/config_files.html). The philosophy of repo2docker is inspired by [Heroku Build Packs](https://devcenter.heroku.com/articles/buildpacks). - ## Docker Image Repo2Docker can be run inside a Docker container if access to the Docker Daemon is provided, for example see [BinderHub](https://github.com/jupyterhub/binderhub). Docker images are [published to quay.io](https://quay.io/repository/jupyterhub/repo2docker?tab=tags). The old [Docker Hub image](https://hub.docker.com/r/jupyter/repo2docker) is no longer supported. diff --git a/docs/source/architecture.md b/docs/source/architecture.md index 153855188..0153c8ae4 100644 --- a/docs/source/architecture.md +++ b/docs/source/architecture.md @@ -4,6 +4,7 @@ This is a living document talking about the architecture of repo2docker from various perspectives. 
(buildpacks)= + ## Buildpacks The **buildpack** concept comes from [Heroku](https://devcenter.heroku.com/articles/buildpacks) @@ -57,7 +58,7 @@ and basic notebook packages (from `repo2docker/buildpacks/conda/environment.yml` to be the same for most repositories built with `CondaBuildPack`, so we want to use [docker layer caching](https://thenewstack.io/understanding-the-docker-cache-for-faster-builds/) as much as possible for performance reasons. Next time a repository is built with `CondaBuildPack`, -we can skip straight to the **copy** step (since the base environment docker image *layers* have +we can skip straight to the **copy** step (since the base environment docker image _layers_ have already been built and cached). The `get_build_scripts` and `get_build_script_files` methods are primarily used for this. @@ -65,11 +66,11 @@ The `get_build_scripts` and `get_build_script_files` methods are primarily used and `get_build_script_files` is used to copy specific scripts (such as a conda installer) into the image to be run as part of `get_build_scripts`. Code in either has the following constraints: -1. You can *not* use the contents of repository in them, since this happens before the repository +1. You can _not_ use the contents of the repository in them, since this happens before the repository is copied into the image. For example, `pip install -r requirements.txt` will not work, since there's no `requirements.txt` inside the image at this point. This is an explicit design decision, to enable better layer caching. -2. You *may*, however, read the contents of the repository and modify the scripts emitted based +2. You _may_, however, read the contents of the repository and modify the scripts emitted based on that! For example, in `CondaBuildPack`, if there's Python 2 specified in `environment.yml`, a different kind of environment is set up. The reading of the `environment.yml` is performed in the BuildPack itself, and not in the scripts returned by `get_build_scripts`. This is fine. @@ -118,7 +119,7 @@ a path to a repository. This might be a local path or a URL. Upon being called, `repo2docker` will loop through all ContentProviders and perform the following commands: -* Run the `detect()` method on the repository path given to `repo2docker`. This +- Run the `detect()` method on the repository path given to `repo2docker`. This should return any value other than `None` if the path matches what the ContentProvider is looking for. @@ -126,12 +127,11 @@ commands: > checks whether the argument is a valid local path. If so, then `detect()` > returns a dictionary: `{'path': source}` which defines the path to the repository. > This path is used by `fetch()` to check that it matches the output directory. -* If `detect()` returns something other than `None`, run `fetch()` with the + +- If `detect()` returns something other than `None`, run `fetch()` with the returned value as its argument. This should result in the contents of the repository being placed locally in a folder. For more information on ContentProviders, take a look at [the ContentProvider base class](https://github.com/jupyterhub/repo2docker/blob/80b979f8580ddef184d2ba7d354e7a833cfa38a4/repo2docker/contentproviders/base.py#L16-L60) which has more explanation.
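To make the `detect()`/`fetch()` contract described above concrete, here is a minimal, hypothetical ContentProvider sketch. It is not one of repo2docker's real providers; the class name and the copy behaviour are illustrative assumptions, and the authoritative interface is the linked base class. It follows the same pattern as the local-path example in the note above: `detect()` returns a spec dictionary or `None`, and `fetch()` is a generator that yields log lines while placing the repository contents into the output directory.

```python
# Hypothetical example for illustration only -- see the linked base class in
# repo2docker/contentproviders/base.py for the real interface and providers.
import os
import shutil

from repo2docker.contentproviders.base import ContentProvider


class FolderCopyProvider(ContentProvider):
    """Copy a local directory into the build's output directory."""

    def detect(self, source, ref=None, extra_args=None):
        # Return something other than None when this provider can handle `source`.
        if os.path.isdir(source):
            return {"path": source}
        return None  # fall through to the next ContentProvider

    def fetch(self, spec, output_dir, yield_output=False):
        # Place the repository contents locally; yield lines for the build log.
        yield f"Copying {spec['path']} to {output_dir}\n"
        shutil.copytree(spec["path"], output_dir, dirs_exist_ok=True)
```

A provider like this would still have to be registered in the list of ContentProviders that `repo2docker` loops over (and the existing local-path provider already covers this case), so treat it purely as a sketch of the interface.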
- - diff --git a/docs/source/contributing/contributing.md b/docs/source/contributing/contributing.md index 7d101dfe6..8e26bcda2 100644 --- a/docs/source/contributing/contributing.md +++ b/docs/source/contributing/contributing.md @@ -2,33 +2,33 @@ Thank you for thinking about contributing to repo2docker! This is an open source project that is developed and maintained entirely by volunteers. -*Your contribution* is integral to the future of the project. +_Your contribution_ is integral to the future of the project. THANK YOU! ## Types of contribution There are many ways to contribute to repo2docker: -* **Update the documentation.** +- **Update the documentation.** If you're reading a page or docstring and it doesn't make sense (or doesn't exist!), please let us know by opening a bug report. It's even more amazing if you can give us a suggested change. -* **Fix bugs or add requested features.** +- **Fix bugs or add requested features.** Have a look through the [issue tracker](https://github.com/jupyterhub/repo2docker/issues) and see if there are any tagged as ["help wanted"](https://github.com/jupyterhub/repo2docker/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22). As the label suggests, we'd love your help! -* **Report a bug.** +- **Report a bug.** If repo2docker isn't doing what you thought it would do then open a [bug report](https://github.com/jupyterhub/repo2docker/issues/new?template=bug_report.md). That issue template will ask you a few questions described in more detail below. -* **Suggest a new feature.** +- **Suggest a new feature.** We know that there are lots of ways to extend repo2docker! If you're interested in adding a feature then please open a [feature request](https://github.com/jupyterhub/repo2docker/issues/new?template=feature_request.md). That issue template will ask you a few questions described in detail below. -* **Review someone's Pull Request.** +- **Review someone's Pull Request.** Whenever somebody proposes changes to the repo2docker codebase, the community reviews the changes, and provides feedback, edits, and suggestions. Check out the [open pull requests](https://github.com/jupyterhub/repo2docker/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-desc) and provide feedback that helps improve the PR and get it merged. Please keep your feedback positive and constructive! -* **Tell people about repo2docker.** +- **Tell people about repo2docker.** As we said above, repo2docker is built by and for its community. If you know anyone who would like to use repo2docker, please tell them about the project! You could give a talk about it, or run a demonstration. @@ -42,31 +42,31 @@ This outlines the process for getting changes to the repo2docker project merged. 1. Identify the correct issue template: [bug report](https://github.com/jupyterhub/repo2docker/issues/new?template=bug_report.md) or [feature request](https://github.com/jupyterhub/repo2docker/issues/new?template=feature_request.md). - **Bug reports** ([examples](https://github.com/jupyterhub/repo2docker/issues?q=is%3Aissue+is%3Aopen+label%3Abug), [new issue](https://github.com/jupyterhub/repo2docker/issues/new?template=bug_report.md)) will ask you for a description of the problem, the expected behaviour, the actual behaviour, how to reproduce the problem, and your personal set up. - Bugs can include problems with the documentation, or code not running as expected. 
+ **Bug reports** ([examples](https://github.com/jupyterhub/repo2docker/issues?q=is%3Aissue+is%3Aopen+label%3Abug), [new issue](https://github.com/jupyterhub/repo2docker/issues/new?template=bug_report.md)) will ask you for a description of the problem, the expected behaviour, the actual behaviour, how to reproduce the problem, and your personal set up. + Bugs can include problems with the documentation, or code not running as expected. - It is really important that you make it easy for the maintainers to reproduce the problem you're having. - This guide on creating a [minimal, complete and verifiable example](https://stackoverflow.com/help/mcve) is a great place to start. + It is really important that you make it easy for the maintainers to reproduce the problem you're having. + This guide on creating a [minimal, complete and verifiable example](https://stackoverflow.com/help/mcve) is a great place to start. - **Feature requests** ([examples](https://github.com/jupyterhub/repo2docker/labels/needs%3A%20discussion), [new issue](https://github.com/jupyterhub/repo2docker/issues/new?template=feature_request.md)) will ask you for the proposed change, any alternatives that you have considered, a description of who would use this feature, and a best-guess of how much work it will take and what skills are required to accomplish. + **Feature requests** ([examples](https://github.com/jupyterhub/repo2docker/labels/needs%3A%20discussion), [new issue](https://github.com/jupyterhub/repo2docker/issues/new?template=feature_request.md)) will ask you for the proposed change, any alternatives that you have considered, a description of who would use this feature, and a best-guess of how much work it will take and what skills are required to accomplish it. - Very easy feature requests might be updates to the documentation to clarify steps for new users. - Harder feature requests may be to add new functionality to the project and will need more in depth discussion about who can complete and maintain the work. + Very easy feature requests might be updates to the documentation to clarify steps for new users. + Harder feature requests may be to add new functionality to the project and will need more in-depth discussion about who can complete and maintain the work. - Feature requests are a great opportunity for you to advocate for the use case you're suggesting. - They help others understand how much effort it would be to integrate the work,and - if you're successful at convincing them that this effort is worth it - make it more likely that they to choose to work on it with you. + Feature requests are a great opportunity for you to advocate for the use case you're suggesting. + They help others understand how much effort it would be to integrate the work, and - if you're successful at convincing them that this effort is worth it - make it more likely that they choose to work on it with you. 2. Open an issue. - Getting consensus with the community is a great way to save time later. + Getting consensus with the community is a great way to save time later. 3. Make edits in [your fork](https://help.github.com/en/articles/fork-a-repo) of the [repo2docker repository](https://github.com/jupyterhub/repo2docker). 4. Make a [pull request](https://help.github.com/en/articles/about-pull-requests). -Read the [next section](guidelines-to-getting-a-pull-request-merged) for guidelines for both reviewers and contributors on merging a PR. -6. Wait for a community member to merge your changes.
- Remember that **someone else must merge your pull request**. - That goes for new contributors and long term maintainers alike. - Because `main` is continuously deployed to mybinder.org it is essential - that `main` is always in a deployable state. -7. (optional) Deploy a new version of repo2docker to mybinder.org by [following these steps](http://mybinder-sre.readthedocs.io/en/latest/deployment/how.html) + Read the [next section](guidelines-to-getting-a-pull-request-merged) for guidelines for both reviewers and contributors on merging a PR. +5. Wait for a community member to merge your changes. + Remember that **someone else must merge your pull request**. + That goes for new contributors and long term maintainers alike. + Because `main` is continuously deployed to mybinder.org it is essential + that `main` is always in a deployable state. +6. (optional) Deploy a new version of repo2docker to mybinder.org by [following these steps](http://mybinder-sre.readthedocs.io/en/latest/deployment/how.html) (guidelines-to-getting-a-pull-request-merged)= @@ -74,26 +74,27 @@ Read the [next section](guidelines-to-getting-a-pull-request-merged) for guideli These are not hard rules to be enforced by 🚓 but they are suggestions written by the repo2docker maintainers to help complete your contribution as smoothly as possible for both you and for them. -* **Create a PR as early as possible**, marking it with `[WIP]` while you work on it. +- **Create a PR as early as possible**, marking it with `[WIP]` while you work on it. This avoids duplicated work, lets you get high level feedback on functionality or API changes, and/or helps find collaborators to work with you. -* **Keep your PR focused.** +- **Keep your PR focused.** The best PRs solve one problem. If you end up changing multiple things, please open separate PRs for the different conceptual changes. -* **Add tests to your code.** +- **Add tests to your code.** PRs will not be merged if Travis is failing. -* **Apply [PEP8](https://www.python.org/dev/peps/pep-0008/)** as much as possible, but not too much. +- **Apply [PEP8](https://www.python.org/dev/peps/pep-0008/)** as much as possible, but not too much. If in doubt, ask. -* **Use merge commits** instead of merge-by-squashing/-rebasing. +- **Use merge commits** instead of merge-by-squashing/-rebasing. This makes it easier to find all changes since the last deployment `git log --merges --pretty=format:"%h %<(10,trunc)%an %<(15)%ar %s" ..` and your PR easier to review. -* **Make it clear when your PR is ready for review.** +- **Make it clear when your PR is ready for review.** Prefix the title of your pull request (PR) with `[MRG]` if the contribution is complete and should be subjected to a detailed review. -* **Use commit messages to describe _why_ you are proposing the changes you are proposing.** -* **Try to not rush changes** (the definition of rush depends on how big your changes are). +- **Use commit messages to describe _why_ you are proposing the changes you are proposing.** +- **Try to not rush changes** (the definition of rush depends on how big your changes are). Remember that everyone in the repo2docker team is a volunteer and we can not (nor would we want to) control their time or interests. Wait patiently for a reviewer to merge the PR. (Remember that **someone else** must merge your PR, even if you have the admin rights to do so.) 
(contributing:local-dev)= + ## Setting up for Local Development To develop & test repo2docker locally, you need: @@ -149,7 +150,6 @@ according to black's style guide. You can activate it with `pre-commit install`. As part of our continuous integration tests we will check that code is formatted properly and the tests will fail if this is not the case. - ### Verify that docker is installed and running If you do not already have [Docker](https://www.docker.com/), you should be able diff --git a/docs/source/contributing/roadmap.md b/docs/source/contributing/roadmap.md index 0a620cadc..4148a7a79 100644 --- a/docs/source/contributing/roadmap.md +++ b/docs/source/contributing/roadmap.md @@ -7,6 +7,7 @@ The goal is to communicate priorities and upcoming release plans. It is not aimed at limiting contributions to what is listed here. ## Using the roadmap + ### Sharing Feedback on the Roadmap All of the community is encouraged to provide feedback as well as share new @@ -16,24 +17,22 @@ After submitting the issue, others from the community will probably respond with questions or comments they have to clarify the issue. The maintainers will help identify what a good next step is for the issue. - ### What do we mean by "next step"? When submitting an issue, think about what "next step" category best describes your issue: -* **now**, concrete/actionable step that is ready for someone to start work on. -These might be items that have a link to an issue or more abstract like -"decrease typos and dead links in the documentation" -* **soon**, less concrete/actionable step that is going to happen soon, -discussions around the topic are coming close to an end at which point it can -move into the "now" category -* **later**, abstract ideas or tasks, need a lot of discussion or -experimentation to shape the idea so that it can be executed. Can also -contain concrete/actionable steps that have been postponed on purpose -(these are steps that could be in "now" but the decision was taken to work on -them later) - +- **now**, a concrete/actionable step that is ready for someone to start work on. + These might be items that have a link to an issue, or they might be more abstract, like + "decrease typos and dead links in the documentation" +- **soon**, a less concrete/actionable step that is going to happen soon; + discussions around the topic are coming close to an end, at which point it can + move into the "now" category +- **later**, abstract ideas or tasks that need a lot of discussion or + experimentation to shape the idea so that it can be executed. Can also + contain concrete/actionable steps that have been postponed on purpose + (these are steps that could be in "now" but the decision was taken to work on + them later) ### Reviewing and Updating the Roadmap @@ -48,22 +47,21 @@ For those please create a The roadmap should give the reader an idea of what is happening next, what needs input and discussion before it can happen, and what has been postponed. - ## The roadmap proper + ### Project vision Repo2docker is a dependable tool used by humans that reduces the complexity of creating the environment in which a piece of software can be executed.
- ### Now The "Now" items are being actively worked on by the project: -* reduce documentation typos and syntax errors -* increase test coverage to 80% (see https://codecov.io/gh/jupyterhub/repo2docker/tree/main/repo2docker for low coverage files) -* mounting repository contents in locations that is not `/home/jovyan` -* investigate options for pinning repo2docker versions ([#490](https://github.com/jupyterhub/repo2docker/issues/490)) +- reduce documentation typos and syntax errors +- increase test coverage to 80% (see https://codecov.io/gh/jupyterhub/repo2docker/tree/main/repo2docker for low coverage files) +- mounting repository contents in locations that are not `/home/jovyan` +- investigate options for pinning repo2docker versions ([#490](https://github.com/jupyterhub/repo2docker/issues/490)) ### Soon @@ -71,15 +69,16 @@ The "Soon" items are being discussed/a plan of action is being made. Once an item reaches the point of an actionable plan and a person who wants to work on it, the item will be moved to the "Now" section. Typically, these will be moved at a future review of the roadmap. -* create the contributor highway, define the route from newcomer to project lead -* add Julia Manifest support (https://docs.julialang.org/en/v1/stdlib/Pkg/index.html, [#486](https://github.com/jupyterhub/repo2docker/issues/486)) -* support different base images/build pack stacks ([#487](https://github.com/jupyterhub/repo2docker/issues/487)) +- create the contributor highway, define the route from newcomer to project lead +- add Julia Manifest support (https://docs.julialang.org/en/v1/stdlib/Pkg/index.html, [#486](https://github.com/jupyterhub/repo2docker/issues/486)) +- support different base images/build pack stacks ([#487](https://github.com/jupyterhub/repo2docker/issues/487)) ### Later The "Later" items are things that are at the back of the project's mind. At this time there is no active plan for an item. The project would like to find the resources and time to discuss and then execute these ideas. -* support execution on a remote host (with more resources than available locally) via the command-line -* add support for using ZIP files as the repo (`repo2docker https://example.com/an-archive.zip`) + +- support execution on a remote host (with more resources than available locally) via the command-line +- add support for using ZIP files as the repo (`repo2docker https://example.com/an-archive.zip`) diff --git a/docs/source/contributing/tasks.md b/docs/source/contributing/tasks.md index 3fe47585c..6deae1c0f 100644 --- a/docs/source/contributing/tasks.md +++ b/docs/source/contributing/tasks.md @@ -32,17 +32,18 @@ py.test -s tests/ To skip the tests related to Mercurial repositories (to avoid installing Mercurial and hg-evolve), one can use the environment variable -``REPO2DOCKER_SKIP_HG_TESTS``. +`REPO2DOCKER_SKIP_HG_TESTS`. ### Troubleshooting Tests Some of the tests have non-Python requirements for your development machine. They are: - `git-lfs` must be installed ([instructions](https://github.com/git-lfs/git-lfs)). It need not be activated -- there is no need to run the `git lfs install` command. It just needs to be available to the test suite. - - If your test failure messages include "`git-lfs filter-process: git-lfs: command not found`", this step should address the problem. + + - If your test failure messages include "`git-lfs filter-process: git-lfs: command not found`", this step should address the problem. - A minimum Docker disk image size of 128GB is required.
If you are not running docker on a linux OS, you may need to expand the runtime image size for your installation. See Docker's instructions for [macOS](https://docs.docker.com/docker-for-mac/space/) or [Windows 10](https://docs.docker.com/docker-for-windows/#resources) for more information. - - If your test failure messages include "`No space left on device: '/home/...`", this step should address the problem. + - If your test failure messages include "`No space left on device: '/home/...`", this step should address the problem. ## Update and Freeze BuildPack Dependencies @@ -51,35 +52,36 @@ dependencies that are installed by default for several buildpacks. For both the `conda` and `virtualenv` (`pip`) base environments in the **Conda BuildPack** and **Python BuildPack**, we install specific pinned versions of all dependencies. We explicitly list the dependencies -we want, then *freeze* them at commit time to explicitly list all the +we want, then _freeze_ them at commit time to explicitly list all the transitive dependencies at current versions. This way, we know that all dependencies will have the exact same version installed at all times. To update one of the dependencies shared across all `repo2docker` builds, you must follow these steps (with more detailed information in the sections below): -1. Bump the version numbers of the dependencies you want to update in the `conda` environment ([link](tasks:conda-dependencies)) +1. Bump the version numbers of the dependencies you want to update in the `conda` environment ([link](tasks:conda-dependencies)) 2. Make a pull request with your changes ([link](https://github.com/jupyterhub/repo2docker/blob/HEAD/CONTRIBUTING.md#make-a-pull-request)) See the subsections below for more detailed instructions. (tasks:conda-dependencies)= + ### Conda dependencies 1. There are two files related to conda dependencies. Edit as needed. - - `repo2docker/buildpacks/conda/environment.yml` + - `repo2docker/buildpacks/conda/environment.yml` - Contains list of packages to install in Python3 conda environments, - which are the default. **This is where all Notebook versions & - notebook extensions (such as JupyterLab / nteract) go**. + Contains list of packages to install in Python3 conda environments, + which are the default. **This is where all Notebook versions & + notebook extensions (such as JupyterLab / nteract) go**. - - `repo2docker/buildpacks/conda/environment.py-2.7.yml` + - `repo2docker/buildpacks/conda/environment.py-2.7.yml` - Contains list of packages to install in Python2 conda environments, which - can be specifically requested by users. **This only needs `IPyKernel` - and kernel related libraries**. Notebook / Notebook Extension need - not be installed here. + Contains list of packages to install in Python2 conda environments, which + can be specifically requested by users. **This only needs `IPyKernel` + and kernel related libraries**. Notebook / Notebook Extension need + not be installed here. 2. Once you edit either of these files to add a new package / bump version on an existing package, you should then run: @@ -147,14 +149,13 @@ Once this has completed, make sure that the new version has been updated. Once the new release has been pushed to PyPI, we need to create a new release on the [GitHub repository releases page](https://github.com/jupyterhub/repo2docker/releases). 
Once on that page, follow these steps: -* Click "Draft a new release" -* Choose a tag version using the same tag you just created above -* The release name is simply the tag version -* Finally, click "Publish release" +- Click "Draft a new release" +- Choose a tag version using the same tag you just created above +- The release name is simply the tag version +- Finally, click "Publish release" That's it! - # Uncommon tasks ## Compare generated Dockerfiles between repo2docker versions diff --git a/docs/source/design.md b/docs/source/design.md index e70835637..7f6536941 100644 --- a/docs/source/design.md +++ b/docs/source/design.md @@ -7,8 +7,7 @@ The philosophy for the repo2docker buildpacks includes: - using common configuration files for familiar installation and packaging tools - allowing configuration files to be combined to compose more complex setups - specifying default locations for configuration files -(in the repository's root, `binder` or `.binder` directory) - + (in the repository's root, `binder` or `.binder` directory) When designing `repo2docker` and adding to it in the future, the developers are influenced by two primary use cases. @@ -79,7 +78,7 @@ is a highly recommended quick read. Although other projects, like [s2i](https://github.com/openshift/source-to-image), exist to convert source to Docker images, `repo2docker` provides the additional functionality to support -*composable* environments. We want to easily have an image with +_composable_ environments. We want to easily have an image with Python3+Julia+R-3.2 environments, rather than just one single language environment. While generally one language environment per container works well, in many scientific / datascience computing environments you need multiple diff --git a/tests/conda/README.md b/tests/conda/README.md index 156976194..6f137f615 100644 --- a/tests/conda/README.md +++ b/tests/conda/README.md @@ -57,7 +57,7 @@ `--target-repo-dir` is meant to support custom paths where repositories can be copied to besides `${HOME}`. - + This test makes use of the `test-extra-args.yaml` file to influence additional arguments passed to `repo2docker` during the test. In this test, specify `--target-repo-dir=/srv/repo`.
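As a rough illustration of the `test-extra-args.yaml` mechanism mentioned above, the sketch below shows one way a test harness could append per-test extra arguments to a `repo2docker` invocation. The file layout assumed here (a plain YAML list of CLI flags such as `--target-repo-dir=/srv/repo`), the helper function, and the example paths are assumptions made for illustration; they are not repo2docker's actual test harness.

```python
# Illustrative sketch only; not the project's real conftest implementation.
from pathlib import Path

import yaml


def build_repo2docker_command(repo_dir):
    """Build a repo2docker command line, appending any per-test extra args.

    Assumes test-extra-args.yaml (if present) contains a plain YAML list of
    CLI flags, e.g. ["--target-repo-dir=/srv/repo"].
    """
    extra_args = []
    extra_args_file = Path(repo_dir) / "test-extra-args.yaml"
    if extra_args_file.exists():
        loaded = yaml.safe_load(extra_args_file.read_text()) or []
        extra_args = [str(arg) for arg in loaded]
    # Build the image without running it, passing any per-test flags.
    return ["repo2docker", "--no-run", *extra_args, repo_dir]


# Placeholder path for demonstration purposes only.
print(build_repo2docker_command("tests/conda/some-test-repo"))
```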