Commit 89c942d
temp development commit
JamesKunstle committed Sep 18, 2023
1 parent 2c31cf1 commit 89c942d
Showing 3 changed files with 91 additions and 26 deletions.
61 changes: 59 additions & 2 deletions pages/index/index_callbacks.py
@@ -31,8 +31,65 @@


@callback(
-   [Output("user-group-loading-signal", "data")],
    dash.Output("projects", "data"),
    dash.Input("user-groups-ready-signal", "data"),
    dash.Input("url", "href"),
    dash.State("projects", "data"),
    prevent_initial_call=True,
)
def sync_searchbar_options(signal, url, searchbar_data):
    """
    Fires whenever the page updates or when the ready-signal
    is triggered following user group querying.
    """
    if current_user.is_authenticated:
        user_id = current_user.get_id()
        users_cache = StrictRedis(
            host="redis-users",
            port=6379,
            password=os.getenv("REDIS_PASSWORD", ""),
        )
        if users_cache.exists(f"{user_id}_searchbar_data_groups"):
            # cached value is a JSON-encoded list (see queries/user_groups_query.py),
            # so decode it before merging with the current searchbar options.
            searchbar_data_groups = json.loads(
                users_cache.get(f"{user_id}_searchbar_data_groups")
            )
            return searchbar_data_groups + searchbar_data

    return dash.no_update
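For context, the value returned above is the user's cached group options prepended to the options already in the searchbar. A rough sketch of the merged shape — the group entry follows queries/user_groups_query.py below, while the repo/org entry's exact format is assumed and not shown in this diff:

searchbar_data_groups = [
    {"label": "jkunstle: my team", "value": {"type": "group", "group_name": "my team"}},
]
searchbar_data = [
    {"label": "org/repo", "value": 46555},  # shape assumed; comes from augur.searchbar_data
]
merged = searchbar_data_groups + searchbar_data  # group options are listed first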


@callback(
    dash.Output("user-groups-locking-div", "children"),
    dash.Output("user-groups-ready-signal", "data"),
    dash.Input("user-groups-loading-signal", "data"),
    background=True,
    prevent_initial_call=True,
)
def wait_on_user_groups(job_id):
    """
    When the 'loading' signal has been fired, group querying
    has kicked off.
    This callback forces the searchbar into a loading state while
    the user's groups are collected.
    When the groups become available, the locking div is unlocked
    and the 'user-groups-ready-signal' fires, alerting a searchbar
    update callback downstream.
    """
    result = AsyncResult(job_id)
    while True:
        if result.successful():
            break
        elif result.failed():
            # TODO: notify user that groups aren't available.
            raise dash.exceptions.PreventUpdate

        time.sleep(1.0)

    return dash.no_update, job_id
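The polling loop above can also be read as a standalone helper. A minimal sketch, assuming a configured Celery app — the helper name and the timeout are illustrative and not part of this commit:

import time

from celery.result import AsyncResult


def wait_for_result(job_id, poll_interval=1.0, timeout=120.0):
    """Poll a Celery task until it succeeds, fails, or the timeout elapses."""
    result = AsyncResult(job_id)
    waited = 0.0
    while waited < timeout:
        if result.successful():
            return result.get()
        if result.failed():
            raise RuntimeError(f"Celery job {job_id} failed")
        time.sleep(poll_interval)
        waited += poll_interval
    raise TimeoutError(f"Celery job {job_id} did not finish within {timeout}s")

A bounded timeout keeps a dead worker from leaving a background callback spinning forever.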


@callback(
    [Output("user-groups-loading-signal", "data")],
    [Input("url", "href"), Input("refresh-button", "n_clicks")],
    prevent_initial_call=True,
)
def kick_off_group_collection(url, n_clicks):
    """Schedules a Celery task to collect user groups.
@@ -59,7 +116,7 @@ def kick_off_group_collection(url, n_clicks):
    # TODO: check how old groups are. If they're pretty old (threshold tbd) then requery

    # check if groups are not already cached, or if the refresh-button was pressed
-   if not users_cache.exists(f"{user_id}_groups") or (
+   if not users_cache.exists(f"{user_id}_searchbar_data_groups") or (
        dash.ctx.triggered_id == "refresh-button"
    ):
        # kick off celery task to collect groups
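Read together, the three callbacks in this file chain through the dcc.Store signals declared in index_layout.py. A rough summary of the flow, reconstructed from the diff above:

# url change / refresh-button click
#   -> kick_off_group_collection: schedules the Celery group-collection task and
#      writes its job id to "user-groups-loading-signal"
#   -> wait_on_user_groups (background=True): polls AsyncResult(job_id); its output to
#      "user-groups-locking-div" keeps the searchbar's dcc.Loading spinner up, and on
#      success it writes the job id to "user-groups-ready-signal"
#   -> sync_searchbar_options: reads the cached group options from Redis and merges
#      them into the "projects" MultiSelect data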
32 changes: 20 additions & 12 deletions pages/index/index_layout.py
@@ -157,17 +157,24 @@
[
    html.Div(
        [
-           dmc.MultiSelect(
-               id="projects",
-               data=augur.searchbar_data,
-               placeholder="Select Orgs, Repos, and User-defined groups. (Max 50 options shown at once)",
-               label="Searchbar",
-               searchable=True,
-               clearable=True,
-               nothingFound="No matches for input",
-               variant="filled",
-               style={"fontSize": 16},
-               limit=50,
+           dcc.Loading(
+               id="multiselect-loading-div",
+               children=[
+                   dmc.MultiSelect(
+                       id="projects",
+                       data=augur.searchbar_data,
+                       placeholder="Select Orgs, Repos, and User-defined groups. (Max 50 options shown at once)",
+                       label="Searchbar",
+                       searchable=True,
+                       clearable=True,
+                       nothingFound="No matches for input",
+                       variant="filled",
+                       style={"fontSize": 16},
+                       limit=50,
+                   ),
+                   # hidden div that forces loading state while user groups are being queried.
+                   html.Div(id="user-groups-locking-div"),
+               ],
+           ),
            dbc.Alert(
                children='Please ensure that your spelling is correct. \
@@ -258,7 +265,8 @@
            id="augur_token_expiration_dash_persistence", storage_type="local", data=""
        ),
        dcc.Store(id="login-succeeded", data=True),
-       dcc.Store(id="user-group-loading-signal", data="", storage_type="memory"),
+       dcc.Store(id="user-groups-loading-signal", data="", storage_type="memory"),
+       dcc.Store(id="user-groups-ready-signal", data="", storage_type="memory"),
        dcc.Location(id="url"),
        navbar,
        dbc.Row(
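The searchbar's loading state relies on dcc.Loading showing its spinner while any callback that targets one of its children is still running; the hidden locking div gives the background callback such a target. A minimal standalone sketch of the same pattern — component names and the sleep are illustrative, not taken from this repo:

import time

from dash import Dash, Input, Output, dcc, html

app = Dash(__name__)
app.layout = html.Div(
    [
        dcc.Loading(
            children=[
                dcc.Dropdown(id="options", options=[]),
                # hidden target: keeps the spinner visible while the slow callback runs
                html.Div(id="locking-div"),
            ]
        ),
        html.Button("load", id="load-btn"),
    ]
)


@app.callback(
    Output("locking-div", "children"),
    Input("load-btn", "n_clicks"),
    prevent_initial_call=True,
)
def slow_work(n_clicks):
    time.sleep(3)  # stand-in for waiting on the Celery group query
    return ""


if __name__ == "__main__":
    app.run(debug=True)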
24 changes: 12 additions & 12 deletions queries/user_groups_query.py
@@ -47,16 +47,16 @@ def user_groups_query(self, user_id):
    user = json.loads(users_cache.get(user_id))

    # query groups and options from Augur
-   users_groups, users_options = get_user_groups(
+   group_to_repos, searchbar_data_groups = get_user_groups(
        user["username"], user["access_token"]
    )

    # stores groups and options in cache
    groups_set = users_cache.set(
-       name=f"{user_id}_groups", value=json.dumps(users_groups)
+       name=f"{user_id}_group_to_repos", value=json.dumps(group_to_repos)
    )
    options_set = users_cache.set(
-       name=f"{user_id}_group_options", value=json.dumps(users_options)
+       name=f"{user_id}_searchbar_data_groups", value=json.dumps(searchbar_data_groups)
    )

    # returns success of operation
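After this task runs, the per-user cache holds two JSON-encoded entries; the shapes follow the json.dumps calls above, with illustrative values:

# f"{user_id}_group_to_repos"        -> {"my team": [46555, 46556]}
# f"{user_id}_searchbar_data_groups" -> [{"label": "jkunstle: my team",
#                                         "value": {"type": "group", "group_name": "my team"}}]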
@@ -81,8 +81,8 @@ def get_user_groups(username, bearer_token):
    # structure of the incoming data
    # [{group_name: {favorited: False, repos: [{repo_git: asd;lfkj, repo_id=46555}, ...]}, ...]
    # creates the group_name->repo_list mapping and the searchbar options for augur user groups
-   users_groups = {}
-   users_group_options = []
+   group_to_repos = {}
+   searchbar_data_groups = []
    g = augur_users_groups.get("data")

    # each of the augur user groups
@@ -100,19 +100,19 @@
        if len(ids) == 0:
            continue

-       # using lower_name for convenience later- no .lower() calls
-       lower_name = group_name.lower()

        # group_name->repo_list mapping
-       users_groups[lower_name] = ids
+       group_to_repos[group_name] = ids

        # searchbar options
        # user's groups are prefixed w/ username to guarantee uniqueness in searchbar
-       users_group_options.append(
-           {"value": lower_name, "label": f"{username}: {group_name}"}
+       searchbar_data_groups.append(
+           {
+               "label": f"{username}: {group_name}",
+               "value": {"type": "group", "group_name": group_name},
+           }
        )

-   return users_groups, users_group_options
+   return group_to_repos, searchbar_data_groups


def parse_repolist(repo_list, prepend_to_url=""):
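For reference, a worked example of what get_user_groups now produces, following the comment on the incoming structure above (group and repo values are illustrative):

# incoming Augur payload (abbreviated):
#   {"data": [{"my team": {"favorited": False,
#                          "repos": [{"repo_git": "https://github.com/org/repo",
#                                     "repo_id": 46555}]}}]}
#
# returned group_to_repos:
#   {"my team": [46555]}
#
# returned searchbar_data_groups:
#   [{"label": "jkunstle: my team",
#     "value": {"type": "group", "group_name": "my team"}}]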
