Merge pull request #458 from gilesknap/skeleton-merge
Skeleton merge - update to python 3.12
gilesknap authored Dec 30, 2023
2 parents 1516059 + 78cbee5 commit 2e2335f
Showing 28 changed files with 707 additions and 529 deletions.
84 changes: 33 additions & 51 deletions .devcontainer/devcontainer.json
@@ -1,68 +1,50 @@
-// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
-// https://github.com/microsoft/vscode-dev-containers/tree/v0.231.6/containers/python-3
+// For format details, see https://containers.dev/implementors/json_reference/
 {
-    "name": "Python 3",
+    "name": "Python 3 Developer Container",
     "build": {
-        "dockerfile": "Dockerfile",
-        "target": "developer",
-        "context": "..",
-        "args": {}
+        "dockerfile": "../Dockerfile",
+        "target": "build",
+        // Only upgrade pip, we will install the project below
+        "args": {
+            "PIP_OPTIONS": "--upgrade pip"
+        }
     },
     "remoteEnv": {
         "DISPLAY": "${localEnv:DISPLAY}"
     },
-    // Set *default* container specific settings.json values on container create.
-    "settings": {
-        "python.defaultInterpreterPath": "/usr/local/bin/python",
-        "python.linting.enabled": true,
-        "python.linting.pylintEnabled": true,
-        "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
-        "python.formatting.blackPath": "/usr/local/py-utils/bin/black",
-        "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
-        "python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
-        "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
-        "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
-        "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
-        "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
-        "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
-    },
-    // Add the IDs of extensions you want installed when the container is created.
-    "extensions": [
-        "ms-python.python",
-        "ms-python.vscode-pylance",
-        "streetsidesoftware.code-spell-checker",
-        "ryanluker.vscode-coverage-gutters",
-        "mhutchie.git-graph",
-        "eamodio.gitlens",
-        "gruntfuggly.todo-tree",
-        "redhat.vscode-yaml",
-        "nsd.vscode-epics",
-        "alefragnani.bookmarks"
-    ],
     // Add the URLs of features you want added when the container is built.
     "features": {
-        //"docker-from-docker": "20.10",
-        "git": "os-provided"
+        "ghcr.io/devcontainers/features/common-utils:1": {
+            "username": "none",
+            "upgradePackages": false
+        }
     },
+    "customizations": {
+        "vscode": {
+            // Add the IDs of extensions you want installed when the container is created.
+            "extensions": [
+                "ms-python.python",
+                "tamasfe.even-better-toml",
+                "redhat.vscode-yaml",
+                "ryanluker.vscode-coverage-gutters"
+            ]
+        }
+    },
     // Use 'forwardPorts' to make a list of ports inside the container available locally.
     // "forwardPorts": [],
     // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
     // "remoteUser": "vscode",
     // Make sure the files we are mapping into the container exist on the host
-    "initializeCommand": "bash -c 'for i in $HOME/.inputrc $HOME/.bashrc_dev; do [ -f $i ] || touch $i; done'",
+    "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'",
     "runArgs": [
-        "--privileged",
         "--net=host",
-        "-v=${localEnv:HOME}/.ssh:/root/.ssh",
-        "-v=${localEnv:HOME}/.bashrc_dev:/root/.bashrc",
-        "-v=${localEnv:HOME}/.inputrc:/root/.inputrc"
+        "--security-opt=label=type:container_runtime_t"
     ],
     "mounts": [
-        // map in home directory - not strictly necessary but may be useful
         "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind",
         "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind",
+        // map in home directory - not strictly necessary but useful
+        "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached"
     ],
-    "workspaceMount": "source=${localWorkspaceFolder},target=/workspace,type=bind",
-    "workspaceFolder": "/workspace",
+    // make the workspace folder the same inside and outside of the container
+    "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind",
+    "workspaceFolder": "${localWorkspaceFolder}",
     // After the container is created, install the python project in editable form
     // This installs into the system python of the container
-    "postCreateCommand": "pip install $([ -f requirements_dev.txt ] && echo -r requirements_dev.txt ) -e .[dev]"
+    "postCreateCommand": "pip install -e '.[dev]'"
 }
35 changes: 35 additions & 0 deletions .github/CONTRIBUTING.rst
@@ -0,0 +1,35 @@
Contributing to the project
===========================

Contributions and issues are most welcome! All issues and pull requests are
handled through GitHub_. Also, please check for any existing issues before
filing a new one. If you have a great idea but it involves big changes, please
file a ticket before making a pull request! We want to make sure you don't spend
your time coding something that might not fit the scope of the project.

.. _GitHub: https://github.com/gilesknap/gphotos-sync/issues

Issue or Discussion?
--------------------

Github also offers discussions_ as a place to ask questions and share ideas. If
your issue is open ended and it is not obvious when it can be "closed", please
raise it as a discussion instead.

.. _discussions: https://github.com/gilesknap/gphotos-sync/discussions

Code coverage
-------------

While 100% code coverage does not make a library bug-free, it significantly
reduces the number of easily caught bugs! Please make sure coverage remains the
same or is improved by a pull request!

Developer guide
---------------

The `Developer Guide`_ contains information on setting up a development
environment, running the tests and what standards the code and documentation
should follow.

.. _Developer Guide: https://diamondlightsource.github.io/gphotos-sync/main/developer/how-to/contribute.html
57 changes: 57 additions & 0 deletions .github/actions/install_requirements/action.yml
@@ -0,0 +1,57 @@
name: Install requirements
description: Run pip install with requirements and upload resulting requirements
inputs:
  requirements_file:
    description: Name of requirements file to use and upload
    required: true
  install_options:
    description: Parameters to pass to pip install
    required: true
  python_version:
    description: Python version to install
    default: "3.x"

runs:
  using: composite

  steps:
    - name: Setup python
      uses: actions/setup-python@v4
      with:
        python-version: ${{ inputs.python_version }}

    - name: Pip install
      run: |
        touch ${{ inputs.requirements_file }}
        # -c uses requirements.txt as constraints, see 'Validate requirements file'
        pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }}
      shell: bash

    - name: Create lockfile
      run: |
        mkdir -p lockfiles
        pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }}
        # delete the self referencing line and make sure it isn't blank
        sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }}
      shell: bash

    - name: Upload lockfiles
      uses: actions/upload-artifact@v3
      with:
        name: lockfiles
        path: lockfiles

    # This eliminates the class of problems where the requirements being given no
    # longer match what the packages themselves dictate. E.g. In the rare instance
    # where I install some-package which used to depend on vulnerable-dependency
    # but now uses good-dependency (despite being nominally the same version)
    # pip will install both if given a requirements file with -r
    - name: If requirements file exists, check it matches pip installed packages
      run: |
        if [ -s ${{ inputs.requirements_file }} ]; then
          if ! diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then
            echo "Error: ${{ inputs.requirements_file }} need the above changes to be exhaustive"
            exit 1
          fi
        fi
      shell: bash
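
Aside: the final step above is the exhaustiveness check described in the comment block. The action installs with the requirements file supplied as a pip constraints file, freezes the resulting environment into a lockfile, and fails if the two differ. A rough local equivalent, sketched here in Python (a hypothetical helper, not part of this commit; the file name requirements.txt is assumed):

# Sketch: reproduce the action's "requirements file matches pip freeze" check locally.
import subprocess
import sys
from pathlib import Path


def requirements_are_exhaustive(requirements_file: str = "requirements.txt") -> bool:
    pinned = Path(requirements_file)
    if not pinned.exists() or pinned.stat().st_size == 0:
        return True  # an empty or missing file means there is nothing to validate yet

    # Mirror `pip freeze --exclude-editable` from the "Create lockfile" step.
    frozen = subprocess.run(
        [sys.executable, "-m", "pip", "freeze", "--exclude-editable"],
        check=True,
        capture_output=True,
        text=True,
    ).stdout.splitlines()
    # Drop any self-referencing "file:" line, as the sed command in the action does.
    frozen = [line for line in frozen if "file:" not in line]

    return pinned.read_text().splitlines() == frozen


if __name__ == "__main__":
    sys.exit(0 if requirements_are_exhaustive() else 1)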
16 changes: 16 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,16 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"
10 changes: 5 additions & 5 deletions .github/pages/index.html
@@ -2,10 +2,10 @@
 <html>

 <head>
-<title>Redirecting to main branch</title>
-<meta charset="utf-8">
-<meta http-equiv="refresh" content="0; url=./main/index.html">
-<link rel="canonical" href="main/index.html">
+    <title>Redirecting to main branch</title>
+    <meta charset="utf-8">
+    <meta http-equiv="refresh" content="0; url=./main/index.html">
+    <link rel="canonical" href="main/index.html">
 </head>

-</html>
+</html>
99 changes: 99 additions & 0 deletions .github/pages/make_switcher.py
@@ -0,0 +1,99 @@
import json
import logging
from argparse import ArgumentParser
from pathlib import Path
from subprocess import CalledProcessError, check_output
from typing import List, Optional


def report_output(stdout: bytes, label: str) -> List[str]:
    ret = stdout.decode().strip().split("\n")
    print(f"{label}: {ret}")
    return ret


def get_branch_contents(ref: str) -> List[str]:
    """Get the list of directories in a branch."""
    stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref])
    return report_output(stdout, "Branch contents")


def get_sorted_tags_list() -> List[str]:
    """Get a list of sorted tags in descending order from the repository."""
    stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"])
    return report_output(stdout, "Tags list")


def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]:
    """Generate the file containing the list of all GitHub Pages builds."""
    # Get the directories (i.e. builds) from the GitHub Pages branch
    try:
        builds = set(get_branch_contents(ref))
    except CalledProcessError:
        builds = set()
        logging.warning(f"Cannot get {ref} contents")

    # Add and remove from the list of builds
    if add:
        builds.add(add)
    if remove:
        assert remove in builds, f"Build '{remove}' not in {sorted(builds)}"
        builds.remove(remove)

    # Get a sorted list of tags
    tags = get_sorted_tags_list()

    # Make the sorted versions list from main branches and tags
    versions: List[str] = []
    for version in ["master", "main"] + tags:
        if version in builds:
            versions.append(version)
            builds.remove(version)

    # Add in anything that is left to the bottom
    versions += sorted(builds)
    print(f"Sorted versions: {versions}")
    return versions


def write_json(path: Path, repository: str, versions: str):
    org, repo_name = repository.split("/")
    struct = [
        {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"}
        for version in versions
    ]
    text = json.dumps(struct, indent=2)
    print(f"JSON switcher:\n{text}")
    path.write_text(text, encoding="utf-8")


def main(args=None):
    parser = ArgumentParser(
        description="Make a versions.txt file from gh-pages directories"
    )
    parser.add_argument(
        "--add",
        help="Add this directory to the list of existing directories",
    )
    parser.add_argument(
        "--remove",
        help="Remove this directory from the list of existing directories",
    )
    parser.add_argument(
        "repository",
        help="The GitHub org and repository name: ORG/REPO",
    )
    parser.add_argument(
        "output",
        type=Path,
        help="Path of write switcher.json to",
    )
    args = parser.parse_args(args)

    # Write the versions file
    versions = get_versions("origin/gh-pages", args.add, args.remove)
    write_json(args.output, args.repository, versions)


if __name__ == "__main__":
    main()
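
Aside: in the docs workflow this script regenerates the version switcher from the gh-pages branch; the same JSON can also be produced directly through write_json. A minimal usage sketch (hypothetical version list and output path, not part of this commit; assumes it is run from the .github/pages directory so the module can be imported):

# Sketch: call write_json from make_switcher.py with a hand-picked version list;
# in CI the list would come from get_versions("origin/gh-pages", add, remove).
from pathlib import Path

from make_switcher import write_json

write_json(Path("switcher.json"), "gilesknap/gphotos-sync", ["main", "3.1.0"])
# switcher.json now holds entries such as:
# {"version": "main", "url": "https://gilesknap.github.io/gphotos-sync/main/"}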
