Merge pull request #5269 from berkeley-dsep-infra/staging
merging 5263/5266/5267/5268 to prod
sean-morris authored Dec 15, 2023
2 parents d954b01 + 5521ed2 commit b3b45c3
Showing 6 changed files with 103 additions and 51 deletions.
48 changes: 24 additions & 24 deletions .circleci/config.yml
@@ -299,11 +299,11 @@ jobs:
hubploy deploy --timeout 30m shiny hub ${CIRCLE_BRANCH}
no_output_timeout: 30m

# - run:
# name: Deploy stat159
# command: |
# hubploy deploy --timeout 30m stat159 hub ${CIRCLE_BRANCH}
# no_output_timeout: 30m
- run:
name: Deploy stat159
command: |
hubploy deploy --timeout 30m stat159 hub ${CIRCLE_BRANCH}
no_output_timeout: 30m

- run:
name: Deploy stat20
@@ -582,15 +582,15 @@ workflows:
ignore:
- staging
- prod
#- hubploy/build-image:
# deployment: stat159
# name: stat159 image build
# # Filters can only be per-job? wtf
# filters:
# branches:
# ignore:
# - staging
# - prod
- hubploy/build-image:
deployment: stat159
name: stat159 image build
# Filters can only be per-job? wtf
filters:
branches:
ignore:
- staging
- prod
- hubploy/build-image:
deployment: stat20
name: stat20 image build
@@ -755,15 +755,15 @@ workflows:
branches:
only:
- staging
#- hubploy/build-image:
# deployment: stat159
# name: stat159 image build
# push: true
# # Filters can only be per-job? wtf
# filters:
# branches:
# only:
# - staging
- hubploy/build-image:
deployment: stat159
name: stat159 image build
push: true
# Filters can only be per-job? wtf
filters:
branches:
only:
- staging
- hubploy/build-image:
deployment: stat20
name: stat20 image build
@@ -796,7 +796,7 @@ workflows:
#- logodev image build
- publichealth image build
- shiny image build
#- stat159 image build
- stat159 image build
- stat20 image build

filters:
2 changes: 1 addition & 1 deletion deployments/a11y/image/environment.yml
@@ -29,7 +29,7 @@ dependencies:
# Upgrade separate from what everyone else uses for now
# https://github.com/berkeley-dsep-infra/datahub/issues/3693
- notebook==7.0.2
- jupyterlab==4.0.4
- jupyterlab==4.0.9
- git+https://github.com/shaneknapp/python-popularity-contest.git@add-error-handling
# ###
# The items below are from infra-requirements, however lab conflicts with the
1 change: 1 addition & 0 deletions deployments/edx/image/environment.yml
@@ -34,6 +34,7 @@ dependencies:
- pip
- pip:
# - -r infra-requirements.txt
- datascience==0.17.6
- ipywidgets==8.0.7
# disable until fixed (probably this: https://github.com/jupyterlab/jupyter-collaboration/issues/162)
# - jupyter_collaboration==1.0.1
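The edx image now pins datascience==0.17.6, the tabular-data library used in Berkeley's Data 8 course materials. A minimal sketch of the Table API it provides follows; the column labels and values are illustrative only.

from datascience import Table

# Build a small in-memory table and sort it (labels and numbers are made up).
enrollments = Table().with_columns(
    "course", ["data8", "stat20"],
    "students", [1200, 450],
)
print(enrollments.sort("students", descending=True))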
4 changes: 4 additions & 0 deletions deployments/julia/image/install-julia-packages.jl
@@ -2,6 +2,10 @@
using Pkg;

Pkg.add.([
Pkg.PackageSpec(;name="FFTW", version="1.7.1"),
Pkg.PackageSpec(;name="FastGaussQuadrature", version="1.0.0"),
Pkg.PackageSpec(;name="ForwardDiff", version="0.10.36"),
Pkg.PackageSpec(;name="NBInclude", version="2.3.0"),
Pkg.PackageSpec(;name="Delaunator", version="0.1.1"),
Pkg.PackageSpec(;name="TriplotRecipes", version="0.1.2"),
Pkg.PackageSpec(;name="StaticArrays", version="1.6.2"),
2 changes: 1 addition & 1 deletion deployments/stat159/image/environment.yml
@@ -142,7 +142,7 @@ dependencies:

# Packages not available on conda-forge, installed through pip
- pip:
- ipython-sql==0.4.1
- jupysql==0.10.5
- ipytest==0.13.0
- jupyterhub==4.0.2
- nbval==0.10.0
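This pin replaces ipython-sql with jupysql, a maintained fork that registers the same %sql and %%sql magics, so existing notebooks should keep working. A minimal notebook-cell sketch, assuming an in-memory SQLite database (illustrative only):

# Run inside a Jupyter notebook on the stat159 image; jupysql provides the
# `sql` extension just as ipython-sql did.
%load_ext sql
%sql sqlite://
%sql CREATE TABLE demo (id INTEGER, name TEXT)
%sql INSERT INTO demo VALUES (1, 'datahub')
%sql SELECT * FROM demo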
97 changes: 72 additions & 25 deletions scripts/delete-unused-users.py
@@ -16,6 +16,7 @@
"""
import argparse
from datetime import timedelta, datetime
import json
import logging
import os
import requests
@@ -26,11 +27,6 @@
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
logger = logging.getLogger(__name__)

token = os.environ["JUPYTERHUB_API_TOKEN"]
headers = {
"Accept": "application/jupyterhub-pagination+json",
"Authorization": f"Bearer {token}",
}

def parse_timedelta(args):
"""
@@ -52,7 +48,7 @@ def parse_timedelta(args):
result[key] = value
return timedelta(**result)

def retrieve_users(hub_url, inactive_since):
def retrieve_users(hub_url, headers, inactive_since):
"""Returns generator of user models that should be deleted"""
url = hub_url.rstrip("/") + "/hub/api/users"
next_page = True
@@ -100,70 +96,121 @@ def should_delete(user, inactive_since):
logger.debug(f"Recent activity: {was_active_recently}")
logger.debug(f"Running server: {user['server']}")
if was_active_recently or user['server'] is not None:
logger.info(f"Not deleting {user['name']}")
logger.info(f"Not flagging {user['name']} for deletion.")
return False
else:
logger.info(f"Flagged {user['name']} for deletion.")
logger.info(f"Flagging {user['name']} for deletion.")
return True

def delete_user(hub_url, name):
def delete_user(hub_url, headers, name):
"""Delete a given user by name via JupyterHub API"""
r = requests.delete(
hub_url.rstrip("/") + f"/hub/api/users/{name}",
headers=headers,
)
r.raise_for_status()

def delete_users_from_hub(hub_url, token, inactive_since, dry_run=False):
"""Delete users from a provided hub url"""
headers = {
"Accept": "application/jupyterhub-pagination+json",
"Authorization": f"Bearer {token}",
}

print(f"Scanning for users eligible for deletion on hub: {hub_url}")
users = list(retrieve_users(hub_url, headers, inactive_since))
print(f"Flagged {len(users)} users for deletion.")

count = 1
for user in users:
if not dry_run:
delete_user(hub_url, headers, user['name'])
logger.info(f"{count}: deleting {user['name']}")
else:
logger.info(f"Skipped {user['name']} due to dry run.")
count += 1

count -= 1
if not dry_run:
print(f"Deleted {count} total users from the ORM for hub: {hub_url}")
else:
print(f"Dry run: Did not delete {count} total users from the ORM for hub: {hub_url}")

def main(args):
"""
Get users from a hub, check to see if they should be deleted from the ORM
and if so, delete them!
"""
count = 1
for user in list(retrieve_users(args.hub_url, args.inactive_since)):
print(f"{count}: deleting {user['name']}")
count += 1
if not args.dry_run:
delete_user(args.hub_url, user['name'])
else:
logger.warning(f"Skipped {user['name']} due to dry run.")
if args.credentials and args.hub_url:
logger.error("Please use only one of the --hub_url or --credentials options when executing the script.")
raise ValueError("Only one of --hub_url or --credentials may be given.")

if args.hub_url:
logger.info(f"Checking for and deleting ORM users on a single hub: {args.hub_url}")
token = os.environ.get("JUPYTERHUB_API_TOKEN")

if not token:
logger.error("Environment variable JUPYTERHUB_API_TOKEN is not set.")
raise ValueError("JUPYTERHUB_API_TOKEN must be set when using --hub_url.")

delete_users_from_hub(args.hub_url, token, args.inactive_since, args.dry_run)

elif args.credentials:
logger.debug(f"Attempting to load credentials file: {args.credentials}")
with open(args.credentials) as f:
creds = json.load(f)
if not creds:
logger.error(f"The credentials file is empty: {args.credentials}")
raise ValueError(f"The credentials file is empty: {args.credentials}")

for hub in creds.keys():
logger.info(f"Checking for and deleting ORM users on hub: {hub}")
token = creds[hub]
delete_users_from_hub(hub, token, args.inactive_since, args.dry_run)
print()

else:
logger.error("You must specify a single hub with the --hub_url argument, or a json file containing hubs and api keys with the --credentials argument.")
raise

count -= 1
print(f"Deleted {count} total users from the ORM.")

if __name__ == "__main__":
argparser = argparse.ArgumentParser()
argparser.add_argument(
'-c',
'--credentials',
dest='credentials',
help='Path to a JSON file containing hub URLs and API keys. Format is: {"hub1_url": "hub1_key", "hub2_url": "hub2_key"}'
)
argparser.add_argument(
'-H',
'--hub_url',
help='Fully qualified URL to the JupyterHub',
required=True
help='Fully qualified URL to the JupyterHub. You must also set the JUPYTERHUB_API_TOKEN environment variable with the API key.'
)
argparser.add_argument(
'--dry_run',
action='store_true',
help='Dry run without deleting users'
help='Dry run without deleting users.'
)
argparser.add_argument(
'--inactive_since',
default='hours=24',
type=parse_timedelta,
help='Period of inactivity after which users are considered for deletion (literal string constructor values for timedelta objects)'
help='Period of inactivity after which users are considered for deletion (literal string constructor values for timedelta objects).'
# https://docs.python.org/3/library/datetime.html#timedelta-objects
)
argparser.add_argument(
'-v',
'--verbose',
dest='verbose',
action='store_true',
help='Set info log level'
help='Set info log level.'
)
argparser.add_argument(
'-d',
'--debug',
dest='debug',
action='store_true',
help='Set debug log level'
help='Set debug log level.'
)
args = argparser.parse_args()

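With this change the deletion script can target several hubs at once through a credentials file, while the single-hub path reads its token from the JUPYTERHUB_API_TOKEN environment variable. Below is a hedged usage sketch in Python; the hub URLs, tokens, and the creds.json path are hypothetical, and only flags shown in the argparser above are used.

import json
import subprocess

# Hypothetical hub URLs and tokens; the creds.json file name is also made up.
creds = {
    "https://hub1.example.edu": "hub1-api-token",
    "https://hub2.example.edu": "hub2-api-token",
}
with open("creds.json", "w") as f:
    json.dump(creds, f)

# Dry run across every hub in the credentials file, flagging users inactive
# for at least 30 days without deleting them (--dry_run) and logging at
# info level (--verbose).
subprocess.run(
    [
        "python3", "scripts/delete-unused-users.py",
        "--credentials", "creds.json",
        "--inactive_since", "days=30",
        "--dry_run",
        "--verbose",
    ],
    check=True,
)

# Single-hub mode instead reads the token from JUPYTERHUB_API_TOKEN, e.g.:
#   JUPYTERHUB_API_TOKEN=... python3 scripts/delete-unused-users.py \
#       --hub_url https://hub1.example.edu --dry_run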
