refactor: update logging messages
Change the default log handler to show the level (e.g., DEBUG, INFO) in each
log output entry. Multiline messages were formatted (back) so that successive
lines no longer automatically include leading indentation. Other log messages
were rewrapped to fit constrained console widths or had their level changed.
maxrake committed Nov 8, 2023
1 parent 8c8aae7 commit 5dfa874
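The handler configuration itself lives in src/phylum/logger.py and is not part of this diff. As a minimal sketch, assuming only the standard library logging module (the stand-in names below are hypothetical), showing the level in each entry comes down to putting %(levelname)s in the formatter:

    import logging

    # Hypothetical stand-in for the handler set up in src/phylum/logger.py
    # (not shown in this diff); it prefixes every record with its level name.
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(levelname)-8s %(message)s"))

    LOG = logging.getLogger("phylum")
    LOG.addHandler(handler)
    LOG.setLevel(logging.DEBUG)

    LOG.info("Provided dependency file is a manifest")
    # stderr: INFO     Provided dependency file is a manifest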
Showing 10 changed files with 105 additions and 79 deletions.
40 changes: 26 additions & 14 deletions src/phylum/ci/ci_azure.py
@@ -36,24 +36,36 @@
from phylum.logger import LOG

AZURE_PAT_ERR_MSG = """
An Azure DevOps token with API access is required to use the API (e.g., to post comments).
This can be the default `System.AccessToken` provided automatically at the start
of each build for the scoped build identity or a personal access token (PAT).
An Azure DevOps token with API access is required to use the API
(e.g., to post comments). This can be the default `System.AccessToken`
provided automatically at the start of each build for the scoped
build identity or a personal access token (PAT).
A PAT needs at least the `Pull Request Threads` scope (read & write).
See the Azure DevOps documentation for using personal access tokens:
See Azure DevOps documentation for using personal access tokens:
* https://learn.microsoft.com/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate
The `System.AccessToken` scoped build identity needs at least the `Contribute to pull requests` permission.
See the Azure DevOps documentation for using the `System.AccessToken`:
The `System.AccessToken` scoped build identity needs
at least the `Contribute to pull requests` permission.
See Azure DevOps documentation for using the `System.AccessToken`:
* https://learn.microsoft.com/azure/devops/pipelines/build/variables#systemaccesstoken
* https://learn.microsoft.com/azure/devops/pipelines/process/access-tokens#job-authorization-scope
"""

GITHUB_PAT_ERR_MSG = """
A GitHub token with API access is required to use the API (e.g., to post comments).
This can be either a classic or fine-grained personal access token (PAT).
A classic PAT needs the `repo` scope or minimally the `public_repo` scope if private repositories are not used.
A fine-grained PAT needs read access to `metadata` and read/write access to `pull requests`.
See the GitHub Token Documentation for more info:
A GitHub token with API access is required to use the
API (e.g., to post comments). This can be either a
classic or fine-grained personal access token (PAT).
A classic PAT needs the `repo` scope or minimally the
`public_repo` scope if private repositories are not used.
A fine-grained PAT needs read access to `metadata` and
read/write access to `pull requests`.
See GitHub Token Documentation for more info:
* https://docs.github.com/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token
* https://docs.github.com/rest/overview/permissions-required-for-fine-grained-personal-access-tokens
* https://docs.github.com/developers/apps/building-oauth-apps/scopes-for-oauth-apps#available-scopes
@@ -219,8 +231,8 @@ def common_ancestor_commit(self) -> Optional[str]:
except subprocess.CalledProcessError as err:
msg = """\
The common ancestor commit could not be found.
Ensure shallow fetch is disabled for repo checkouts:
https://learn.microsoft.com/azure/devops/pipelines/yaml-schema/steps-checkout#shallow-fetch"""
Ensure shallow fetch is disabled for repo checkouts:
https://learn.microsoft.com/azure/devops/pipelines/yaml-schema/steps-checkout#shallow-fetch"""
pprint_subprocess_error(err)
LOG.warning(textwrap.dedent(msg))
common_commit = None
@@ -239,7 +251,7 @@ def is_any_depfile_changed(self) -> bool:

err_msg = """\
Consider changing the `fetchDepth` property in CI settings to clone/fetch more branch history.
Reference: https://learn.microsoft.com/azure/devops/pipelines/yaml-schema/steps-checkout"""
Reference: https://learn.microsoft.com/azure/devops/pipelines/yaml-schema/steps-checkout"""
self.update_depfiles_change_status(diff_base_sha, err_msg)

return any(depfile.is_depfile_changed for depfile in self.depfiles)
44 changes: 22 additions & 22 deletions src/phylum/ci/ci_base.py
@@ -169,8 +169,8 @@ def depfiles(self) -> Depfiles:

msg = """\
No valid dependency files were detected.
Consider specifying at least one with
`--lockfile` argument or in `.phylum_project` file."""
Consider specifying at least one with
`--lockfile` argument or in `.phylum_project` file."""
raise SystemExit(textwrap.dedent(msg))

@progress_spinner("Filtering dependency files")
@@ -197,11 +197,11 @@ def _filter_depfiles(self, provided_depfiles: LockfileEntries) -> Depfiles:
pprint_subprocess_error(err)
msg = f"""\
Provided dependency file [code]{provided_depfile!r}[/] failed to parse as
lockfile type [code]{provided_depfile.type}[/]. If this is a manifest, consider
supplying lockfile type explicitly in the `.phylum_project` file.
For more info, see: https://docs.phylum.io/docs/lockfile_generation
Please report this as a bug if you believe [code]{provided_depfile!r}[/]
is a valid [code]{provided_depfile.type}[/] dependency file."""
lockfile type [code]{provided_depfile.type}[/]. If this is a manifest, consider
supplying lockfile type explicitly in the `.phylum_project` file.
For more info, see: https://docs.phylum.io/docs/lockfile_generation
Please report this as a bug if you believe [code]{provided_depfile!r}[/]
is a valid [code]{provided_depfile.type}[/] dependency file."""
LOG.warning(textwrap.dedent(msg), extra=MARKUP)
self.returncode = ReturnCode.DEPFILE_FILTER
continue
@@ -210,20 +210,20 @@ def _filter_depfiles(self, provided_depfiles: LockfileEntries) -> Depfiles:
if provided_depfile in self.potential_manifests and provided_depfile in self.potential_lockfiles:
msg = f"""\
Provided dependency file [code]{provided_depfile!r}[/] is a [b]lockifest[/].
It will be treated as a [b]manifest[/].
For more info, see: https://docs.phylum.io/docs/lockfile_generation"""
It will be treated as a [b]manifest[/].
For more info, see: https://docs.phylum.io/docs/lockfile_generation"""
LOG.warning(textwrap.dedent(msg), extra=MARKUP)
depfile = Depfile(provided_depfile, self.cli_path, DepfileType.LOCKIFEST)
elif provided_depfile in self.potential_manifests:
LOG.debug("Provided dependency file [code]%r[/] is a [b]manifest[/]", provided_depfile, extra=MARKUP)
LOG.info("Provided dependency file [code]%r[/] is a [b]manifest[/]", provided_depfile, extra=MARKUP)
depfile = Depfile(provided_depfile, self.cli_path, DepfileType.MANIFEST)
elif provided_depfile in self.potential_lockfiles:
LOG.debug("Provided dependency file [code]%r[/] is a [b]lockfile[/]", provided_depfile, extra=MARKUP)
LOG.info("Provided dependency file [code]%r[/] is a [b]lockfile[/]", provided_depfile, extra=MARKUP)
depfile = Depfile(provided_depfile, self.cli_path, DepfileType.LOCKFILE)
else:
msg = f"""\
Provided dependency file [code]{provided_depfile!r}[/] is an [b]unknown[/] type.
It will be treated as a [b]manifest[/]."""
It will be treated as a [b]manifest[/]."""
LOG.warning(textwrap.dedent(msg), extra=MARKUP)
depfile = Depfile(provided_depfile, self.cli_path, DepfileType.UNKNOWN)
depfiles.append(depfile)
@@ -232,7 +232,7 @@ def _filter_depfiles(self, provided_depfiles: LockfileEntries) -> Depfiles:
if any(depfile.is_manifest for depfile in depfiles):
msg = """\
At least one manifest file was included.
Forcing analysis to ensure updated dependencies are included."""
Forcing analysis to ensure updated dependencies are included."""
LOG.warning(textwrap.dedent(msg))
self._force_analysis = True

@@ -480,8 +480,8 @@ def _ensure_project_exists(self) -> None:
return
msg = """\
There was a problem creating the project.
A PRO account is needed to create a project with a group.
If the command was expected to succeed, please report this as a bug."""
A PRO account is needed to create a project with a group.
If the command was expected to succeed, please report this as a bug."""
raise PhylumCalledProcessError(err, textwrap.dedent(msg)) from err
LOG.info("Project %s created successfully", self.phylum_project)
if self._project_file_already_existed:
@@ -546,8 +546,8 @@ def analyze(self) -> None:
else:
msg = """\
There was a problem analyzing the project.
A PRO account is needed to use groups.
If the command was expected to succeed, please report this as a bug."""
A PRO account is needed to use groups.
If the command was expected to succeed, please report this as a bug."""
raise PhylumCalledProcessError(err, textwrap.dedent(msg)) from err

self._parse_analysis_result(analysis_result)
@@ -568,11 +568,11 @@ def _get_base_packages(self) -> Packages:
pprint_subprocess_error(err)
msg = f"""\
Due to error, assuming no previous packages in [code]{depfile!r}[/].
Consider supplying lockfile type explicitly in `.phylum_project` file.
For more info, see: https://docs.phylum.io/docs/lockfile_generation
Please report this as a bug if you believe [code]{depfile!r}[/]
is a valid [code]{depfile.type}[/] [b]{depfile.depfile_type.value}[/] at revision
[code]{self.common_ancestor_commit}[/]."""
Consider supplying lockfile type explicitly in `.phylum_project` file.
For more info, see: https://docs.phylum.io/docs/lockfile_generation
Please report this as a bug if you believe [code]{depfile!r}[/]
is a valid [code]{depfile.type}[/] [b]{depfile.depfile_type.value}[/] at revision
[code]{self.common_ancestor_commit}[/]."""
LOG.warning(textwrap.dedent(msg), extra=MARKUP)
continue
base_packages.update(prev_depfile_pkgs)
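The reworked messages above all follow the same pattern: a triple-quoted string opened with `"""\` and passed through textwrap.dedent() before logging. A minimal sketch of that pattern, reusing the manifest warning from one of the hunks above, shows why successive lines now arrive without extra leading indentation:

    import logging
    import textwrap

    LOG = logging.getLogger("phylum")

    # The backslash after the opening quotes suppresses the leading newline, and
    # textwrap.dedent() strips the indentation shared by every line, so each
    # successive line reaches the handler flush with the first one.
    msg = """\
        At least one manifest file was included.
        Forcing analysis to ensure updated dependencies are included."""
    LOG.warning(textwrap.dedent(msg))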
14 changes: 8 additions & 6 deletions src/phylum/ci/ci_bitbucket.py
@@ -35,9 +35,11 @@
from phylum.logger import LOG

BITBUCKET_TOK_ERR_MSG = """
A Bitbucket access token with API access is required to use the API (e.g., to post comments).
This can be a repository, project, or workspace token with at least the `pullrequest` scope.
See the Bitbucket documentation for using access tokens:
A Bitbucket access token with API access is required to use the
API (e.g., to post comments). This can be a repository, project,
or workspace token with at least the `pullrequest` scope.
See Bitbucket documentation for using access tokens:
* https://developer.atlassian.com/cloud/bitbucket/rest/intro/#access-tokens
"""

@@ -179,8 +181,8 @@ def common_ancestor_commit(self) -> Optional[str]:
except subprocess.CalledProcessError as err:
msg = """\
The common ancestor commit could not be found.
Ensure the git strategy is set to `full clone depth` for repo checkouts:
https://support.atlassian.com/bitbucket-cloud/docs/git-clone-behavior/"""
Ensure the git strategy is set to `full clone depth` for repo checkouts:
https://support.atlassian.com/bitbucket-cloud/docs/git-clone-behavior/"""
pprint_subprocess_error(err)
LOG.warning(textwrap.dedent(msg))
common_commit = None
@@ -199,7 +201,7 @@ def is_any_depfile_changed(self) -> bool:

err_msg = """\
Consider changing the `clone depth` variable in CI settings to clone/fetch more branch history.
Reference: https://support.atlassian.com/bitbucket-cloud/docs/git-clone-behavior/"""
Reference: https://support.atlassian.com/bitbucket-cloud/docs/git-clone-behavior/"""
self.update_depfiles_change_status(diff_base_sha, err_msg)

return any(depfile.is_depfile_changed for depfile in self.depfiles)
32 changes: 20 additions & 12 deletions src/phylum/ci/ci_github.py
@@ -29,13 +29,21 @@
from phylum.logger import LOG

PAT_ERR_MSG = """
A GitHub token with API access is required to use the API (e.g., to post comments).
This can be the default `GITHUB_TOKEN` provided automatically at the start of each workflow run.
It can also be either a classic or fine-grained personal access token (PAT).
A `GITHUB_TOKEN` needs at least write access for `pull-requests` scope (even though the `issues` API is used).
A classic PAT needs the `repo` scope or minimally the `public_repo` scope if private repositories are not used.
A fine-grained PAT needs read access to `metadata` and read/write access to `pull requests`.
See the GitHub Token Documentation for more info:
A GitHub token with API access is required to use the API
(e.g., to post comments). This can be the default `GITHUB_TOKEN`
provided automatically at the start of each workflow run. It can also
be either a classic or fine-grained personal access token (PAT).
A `GITHUB_TOKEN` needs at least write access for `pull-requests`
scope (even though the `issues` API is used).
A classic PAT needs the `repo` scope or minimally the `public_repo`
scope if private repositories are not used.
A fine-grained PAT needs read access to `metadata` and read/write
access to `pull requests`.
See GitHub Token Documentation for more info:
* https://docs.github.com/actions/security-guides/automatic-token-authentication
* https://docs.github.com/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token
* https://docs.github.com/rest/overview/permissions-required-for-fine-grained-personal-access-tokens
@@ -57,10 +65,10 @@ def __init__(self, args: Namespace) -> None: # noqa: D107 ; the base __init__ d
except subprocess.CalledProcessError as err:
msg = f"""\
Adding the GitHub workspace `{github_workspace}` as a safe
directory in the git config failed. This is the recommended workaround
for container actions, to avoid the `unsafe repository` error.
See https://github.com/actions/checkout/issues/766 (git CVE-2022-24765)
for more detail."""
directory in the git config failed. This is the recommended workaround
for container actions, to avoid the `unsafe repository` error.
See https://github.com/actions/checkout/issues/766 (git CVE-2022-24765)
for more detail."""
raise PhylumCalledProcessError(err, textwrap.dedent(msg)) from err

super().__init__(args)
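The git call this message wraps sits above the hunk and is not shown here. As a hedged sketch (the variable names are assumptions, not the module's actual code), the documented workaround for the `unsafe repository` error is to mark the workspace as a safe directory before running any other git commands:

    import os
    import subprocess

    # Hypothetical reconstruction of the call the error message refers to;
    # marking the checkout as safe.directory avoids git's CVE-2022-24765
    # "unsafe repository" refusal inside container actions.
    github_workspace = os.environ.get("GITHUB_WORKSPACE", "/github/workspace")
    cmd = ["git", "config", "--global", "--add", "safe.directory", github_workspace]
    subprocess.run(cmd, check=True)  # a CalledProcessError here is what gets wrapped above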
@@ -153,7 +161,7 @@ def is_any_depfile_changed(self) -> bool:

err_msg = """\
Consider changing the `fetch-depth` input during checkout to fetch more branch history.
Reference: https://github.com/actions/checkout"""
Reference: https://github.com/actions/checkout"""
self.update_depfiles_change_status(pr_base_sha, err_msg)

return any(depfile.is_depfile_changed for depfile in self.depfiles)
6 changes: 3 additions & 3 deletions src/phylum/ci/ci_gitlab.py
@@ -144,8 +144,8 @@ def common_ancestor_commit(self) -> Optional[str]:
except subprocess.CalledProcessError as err:
msg = """\
The common ancestor commit could not be found.
Ensure the git strategy is set to `clone` for repo checkouts:
https://docs.gitlab.com/ee/ci/runners/configure_runners.html#git-strategy"""
Ensure the git strategy is set to `clone` for repo checkouts:
https://docs.gitlab.com/ee/ci/runners/configure_runners.html#git-strategy"""
pprint_subprocess_error(err)
LOG.warning(textwrap.dedent(msg))
common_commit = None
@@ -164,7 +164,7 @@ def is_any_depfile_changed(self) -> bool:

err_msg = """\
Consider changing the `GIT_DEPTH` variable in CI settings to clone/fetch more branch history.
Reference: https://docs.gitlab.com/ee/ci/large_repositories/index.html#shallow-cloning"""
Reference: https://docs.gitlab.com/ee/ci/large_repositories/index.html#shallow-cloning"""
self.update_depfiles_change_status(diff_base_sha, err_msg)

return any(depfile.is_depfile_changed for depfile in self.depfiles)
8 changes: 5 additions & 3 deletions src/phylum/ci/cli.py
@@ -197,11 +197,13 @@ def main(args: Optional[Sequence[str]] = None) -> int:
# Bail early when possible
LOG.debug("Dependency files in use: %s", ci_env.depfiles)
if ci_env.force_analysis:
LOG.info("Forced analysis specified with flag or otherwise set. Proceeding with analysis ...")
LOG.info("Forced analysis specified or otherwise set. Proceeding with analysis.")
elif ci_env.is_any_depfile_changed:
LOG.info("A lockfile has changed. Proceeding with analysis ...")
# "lockfile" language is used from here forward because the existence
# of even a single manifest means the forced analysis branch is taken
LOG.info("A lockfile has changed. Proceeding with analysis.")
elif ci_env.phylum_comment_exists:
LOG.info("Existing Phylum comment found. Proceeding with analysis ...")
LOG.info("Existing Phylum comment found. Proceeding with analysis.")
else:
LOG.warning("No lockfile has changed. Nothing to do.")
return 0
18 changes: 10 additions & 8 deletions src/phylum/ci/depfile.py
@@ -120,13 +120,13 @@ def deps(self) -> Packages:
if self.is_lockfile:
msg = f"""\
Please report this as a bug if you believe [code]{self!r}[/]
is a valid [code]{self.type}[/] lockfile."""
is a valid [code]{self.type}[/] lockfile."""
else:
msg = f"""\
Consider supplying lockfile type explicitly in the `.phylum_project` file.
For more info, see: https://docs.phylum.io/docs/lockfile_generation
Please report this as a bug if you believe [code]{self!r}[/]
is a valid [code]{self.type}[/] manifest file."""
For more info, see: https://docs.phylum.io/docs/lockfile_generation
Please report this as a bug if you believe [code]{self!r}[/]
is a valid [code]{self.type}[/] manifest file."""
raise PhylumCalledProcessError(err, textwrap.dedent(msg)) from err
return sorted(set(curr_depfile_packages))

@@ -146,10 +146,12 @@ def parse_depfile(cli_path: Path, lockfile_type: str, depfile_path: Path, start:
"""
if start is None:
start = Path.cwd()
msg = f"""\
Parsing [code]{os.path.relpath(depfile_path, start=start)}[/] as [code]{lockfile_type}[/] dependency file.
Manifests may take a while."""
LOG.info(textwrap.dedent(msg), extra=MARKUP)
LOG.info(
"Parsing [code]%s[/] as [code]%s[/] dependency file. Manifests take longer.",
os.path.relpath(depfile_path, start=start),
lockfile_type,
extra=MARKUP,
)
cmd = [str(cli_path), "parse", "--lockfile-type", lockfile_type, str(depfile_path)]
LOG.debug("Using parse command: %s", shlex.join(cmd))
LOG.debug("Running command from: %s", start)