Improve 'View Data': Add download links, add names, improve design #356

Merged: 2 commits, May 3, 2021
52 changes: 27 additions & 25 deletions datameta/api/metadatasets.py
@@ -18,7 +18,7 @@
from pyramid.request import Request
from typing import Optional, Dict
from ..linting import validate_metadataset_record
from .. import security, siteid, models
from .. import security, siteid, models, resource
from ..security import authz
import datetime
from ..resource import resource_by_id, get_identifier
@@ -29,25 +29,25 @@
class MetaDataSetResponse(DataHolderBase):
"""MetaDataSetResponse container for OpenApi communication"""
id: dict
record: dict
record: Dict[str, Optional[str]]
file_ids: Dict[str, Optional[Dict[str, str]]]
user_id: str
submission_id: Optional[str] = None

def render_record_values(mdatum:Dict[str, models.MetaDatum], record:dict) -> dict:
def render_record_values(metadata:Dict[str, models.MetaDatum], record:dict) -> dict:
"""Renders values of a metadataset record. Please note: the record should already have passed validation."""
record_rendered = record.copy()
for field in mdatum:
for field in metadata:
if not field in record_rendered.keys():
# if field is not contained in record, add it as None to the record:
record_rendered[field] = None
continue
elif record_rendered[field] and mdatum[field].datetimefmt:
elif record_rendered[field] and metadata[field].datetimefmt:
# if MetaDatum is a datetime field, render the value in isoformat
record_rendered[field] = datetime.datetime.strptime(
record_rendered[field],
mdatum[field].datetimefmt
metadata[field].datetimefmt
).isoformat()

return record_rendered

def formatted_mrec_value(mrec):
@@ -59,9 +59,9 @@ def formatted_mrec_value(mrec):
def get_record_from_metadataset(mdata_set:models.MetaDataSet) -> dict:
""" Construct a dict containing all records of that MetaDataSet"""
return {
rec.metadatum.name: formatted_mrec_value(rec)
for rec in mdata_set.metadatumrecords
}
rec.metadatum.name : formatted_mrec_value(rec)
for rec in mdata_set.metadatumrecords
}

def delete_staged_metadataset_from_db(mdata_id, db, auth_user, request):
# Find the requested metadataset
@@ -124,8 +124,8 @@ def post(request:Request) -> MetaDataSetResponse:
mdatum_query = db.query(models.MetaDatum).order_by(
models.MetaDatum.order
).all()
mdatum = {mdat.name: mdat for mdat in mdatum_query }
record = render_record_values(mdatum, record)
metadata = {mdat.name: mdat for mdat in mdatum_query }
record = render_record_values(metadata, record)

# construct new MetaDataSet:
mdata_set = models.MetaDataSet(
@@ -139,18 +139,19 @@ def post(request:Request) -> MetaDataSetResponse:
# construct new MetaDatumRecords
for name, value in record.items():
mdatum_rec = models.MetaDatumRecord(
metadatum_id = mdatum[name].id,
metadataset_id = mdata_set.id,
file_id = None,
value = value
metadatum_id = metadata[name].id,
metadataset_id = mdata_set.id,
file_id = None,
value = value
)
db.add(mdatum_rec)

return MetaDataSetResponse(
id = get_identifier(mdata_set),
record = record,
user_id = get_identifier(mdata_set.user),
submission_id = get_identifier(mdata_set.submission) if mdata_set.submission else None,
id = get_identifier(mdata_set),
record = record,
file_ids = { name : None for name, metadatum in metadata.items() if metadatum.isfile },
user_id = get_identifier(mdata_set.user),
submission_id = get_identifier(mdata_set.submission) if mdata_set.submission else None,
)

@view_config(
@@ -160,7 +161,7 @@ def post(request:Request) -> MetaDataSetResponse:
openapi=True
)
def get_metadataset(request:Request) -> MetaDataSetResponse:
"""Create new metadataset"""
"""Get a metadataset by ID"""
auth_user = security.revalidate_user(request)
db = request.dbsession
mdata_set = resource_by_id(db, models.MetaDataSet, request.matchdict['id'])
@@ -172,10 +173,11 @@ def get_metadataset(request:Request) -> MetaDataSetResponse:
raise HTTPForbidden()

return MetaDataSetResponse(
id=get_identifier(mdata_set),
record=get_record_from_metadataset(mdata_set),
user_id=get_identifier(mdata_set.user),
submission_id=get_identifier(mdata_set.submission) if mdata_set.submission else None,
id = get_identifier(mdata_set),
record = get_record_from_metadataset(mdata_set),
file_ids = { mdrec.metadatum.name : resource.get_identifier_or_none(mdrec.file) for mdrec in mdata_set.metadatumrecords if mdrec.metadatum.isfile },
user_id = get_identifier(mdata_set.user),
submission_id = get_identifier(mdata_set.submission) if mdata_set.submission else None,
)

@view_config(
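
Both endpoints now expose which metadata fields are file fields through the new `file_ids` member of `MetaDataSetResponse`. The sketch below is illustration only and not part of the diff: the stub classes, field names, and IDs are invented, and the identifier shape (`{site, uuid}`) is inferred from how the frontend reads `.site` and `.uuid`. It shows the two shapes the comprehensions above produce: the POST handler maps every file field to `None` because no files are attached yet, while the GET handler resolves each file record to the identifier of its attached file.

```python
# Illustration only, not part of the diff: stand-in objects replace the
# SQLAlchemy models to show the shape of the new `file_ids` mapping.
from dataclasses import dataclass
from typing import Optional


@dataclass
class StubMetaDatum:        # stands in for models.MetaDatum
    name: str
    isfile: bool


@dataclass
class StubFile:             # stands in for a stored file with site/uuid identifiers
    site: str
    uuid: str


@dataclass
class StubRecord:           # stands in for models.MetaDatumRecord
    metadatum: StubMetaDatum
    file: Optional[StubFile]


def get_identifier_or_none(obj):
    """Stand-in for resource.get_identifier_or_none()."""
    return None if obj is None else {"site": obj.site, "uuid": obj.uuid}


# POST /metadatasets: no files are attached yet, so every file field maps to None.
metadata = {m.name: m for m in (StubMetaDatum("FASTQ R1", True), StubMetaDatum("Date", False))}
print({name: None for name, metadatum in metadata.items() if metadatum.isfile})
# {'FASTQ R1': None}

# GET on an existing metadataset: file fields map to the attached file's identifier.
records = [StubRecord(StubMetaDatum("FASTQ R1", True), StubFile("f-0001", "9a2b4c6d"))]
print({rec.metadatum.name: get_identifier_or_none(rec.file) for rec in records if rec.metadatum.isfile})
# {'FASTQ R1': {'site': 'f-0001', 'uuid': '9a2b4c6d'}}
```
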
9 changes: 6 additions & 3 deletions datameta/api/openapi.yaml
@@ -15,7 +15,7 @@
openapi: 3.0.0
info:
description: DataMeta
version: 0.15.0
version: 0.16.0
title: DataMeta

servers:
@@ -1096,8 +1096,11 @@ components:
record:
type: object
additionalProperties: true
# a free-form object,
# any property is allowed
# a free-form object, any property is allowed
fileIds:
type: object
additionalProperties: true
# a free-form object mapping the field names to file IDs
id:
$ref: "#/components/schemas/Identifier"
submissionId:
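
For reference, a hypothetical serialized `MetaDataSetResponse` as `view.js` consumes it is sketched below. The keys follow the camelCase schema above; every concrete value (metadata names, site IDs, UUIDs) is invented for this example.

```python
# Hypothetical example of the serialized response; all values are invented.
example_response = {
    "id": {"site": "mds-0007", "uuid": "5e1f2a3b"},
    "record": {"FASTQ R1": "sample_R1.fastq.gz", "Date": "2021-05-03T00:00:00"},
    "fileIds": {"FASTQ R1": {"site": "f-0001", "uuid": "9a2b4c6d"}},
    "userId": {"site": "u-0003", "uuid": "c4d5e6f7"},
    "submissionId": {"site": "s-0002", "uuid": "0a1b2c3d"},
}
```
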
44 changes: 25 additions & 19 deletions datameta/api/ui/view.py
@@ -27,7 +27,8 @@

import pandas as pd

from ... import security, samplesheet, errors
from ... import security, samplesheet, errors, resource
from ...security import authz
from ...resource import get_identifier
from ...models import MetaDatum, MetaDataSet, MetaDatumRecord, User, Group, Submission

@@ -40,6 +41,8 @@ class ViewTableResponse(MetaDataSetResponse):
"""Data class representing the JSON response returned by POST:/api/ui/view"""
submission_label : Optional[str] = None
group_id : Optional[dict] = None
group_name: Optional[str] = None
user_name: Optional[str] = None

def metadata_index_to_name(db, idx):
name = db.query(MetaDatum.name).order_by(MetaDatum.order).limit(1).offset(idx).scalar()
@@ -92,10 +95,12 @@ def post(request: Request):
and_filters = [
# This clause joins the EXISTS subquery with the main query
MetaDataSet.id==MetaDataSetFilter.id,
# This clause restricts the results to submissions of the user's group
Submission.group_id == auth_user.group_id
]

# This clause restricts the results to submissions of the user's group
if not authz.view_mset_any(auth_user):
and_filters.append(Submission.group_id == auth_user.group_id)

# Additionally, if a search pattern was requested, we create a clause
# implementing the search and add it to the AND clause
if searches:
@@ -116,7 +121,7 @@ def post(request: Request):
# search term with the metadataset and submission site_ids, the
# submission label and the MetaDatumRecord value, using the table alias
# that was constructed for the search term before.
search_clauses = [ or_(*( field.ilike(f"%{search}%") for field in [ User.site_id, Group.site_id, MetaDataSetFilter.site_id, Submission.site_id, Submission.label, MetaDatumRecordFilter.value ])) for search, MetaDatumRecordFilter in searches ]
search_clauses = [ or_(*( field.ilike(f"%{search}%") for field in [ User.site_id, User.fullname, Group.site_id, Group.name, MetaDataSetFilter.site_id, Submission.site_id, Submission.label, MetaDatumRecordFilter.value ])) for search, MetaDatumRecordFilter in searches ]
and_filters += search_clauses

# Finally, the filter query, which will be added to the main query as a
@@ -144,18 +149,16 @@ def post(request: Request):

MetaDatumRecordOrder = aliased(MetaDatumRecord)
mdata_name = None
if sort_idx == 0: # The submission site ID
mdatasets_base_query = mdatasets_base_query.join(Submission).order_by(direction(Submission.site_id))
elif sort_idx == 1: # The submission label
if sort_idx == 0: # The submission label
mdatasets_base_query = mdatasets_base_query.join(Submission).order_by(direction(Submission.label))
elif sort_idx == 2: # The metadataset user site ID
mdatasets_base_query = mdatasets_base_query.join(User).order_by(direction(User.site_id))
elif sort_idx == 3: # The submission group site ID
mdatasets_base_query = mdatasets_base_query.join(Submission).join(Group).order_by(direction(Group.site_id))
elif sort_idx == 4: # The metadataset site ID
elif sort_idx == 1: # The user full name
mdatasets_base_query = mdatasets_base_query.join(User).order_by(direction(User.fullname)) # TODO FIX
elif sort_idx == 2: # The submission group name
mdatasets_base_query = mdatasets_base_query.join(Submission).join(Group).order_by(direction(Group.site_id)) # TODO FIX
elif sort_idx == 3: # The metadataset site ID
mdatasets_base_query = mdatasets_base_query.order_by(direction(MetaDataSet.site_id))
else: # Sorting by a metadatum value
mdata_name = metadata_index_to_name(db, sort_idx - 5)
mdata_name = metadata_index_to_name(db, sort_idx - 4)
# [WARNING] The following JOIN assumes that an inner join between MetaDatumRecord
# and MetaDatum does not result in a loss of rows if the JOIN is restricted to one
# particular MetaDatum.name. Put differently, this query requires that we always
@@ -196,12 +199,15 @@ def post(request: Request):
# Build the 'data' response
data = [
ViewTableResponse(
id = get_identifier(mdata_set),
record = get_record_from_metadataset(mdata_set),
user_id = get_identifier(mdata_set.user),
group_id = get_identifier(mdata_set.submission.group),
submission_id = get_identifier(mdata_set.submission) if mdata_set.submission else None,
submission_label = mdata_set.submission.label
id = get_identifier(mdata_set),
record = get_record_from_metadataset(mdata_set),
file_ids = { mdrec.metadatum.name : resource.get_identifier_or_none(mdrec.file) for mdrec in mdata_set.metadatumrecords if mdrec.metadatum.isfile },
user_id = get_identifier(mdata_set.user),
user_name = mdata_set.user.fullname,
group_id = get_identifier(mdata_set.submission.group),
group_name = mdata_set.submission.group.name,
submission_id = get_identifier(mdata_set.submission) if mdata_set.submission else None,
submission_label = mdata_set.submission.label
)
for mdata_set, _ in mdata_sets
]
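
The view table's fixed columns change with this PR: the submission site ID column is dropped and user and group are now shown by name, so metadata columns start at index 4 instead of 5. Below is a minimal sketch of the assumed column-to-sort-target mapping; the helper and the metadata names are hypothetical, and note that the group branch above still sorts by site ID for now, as flagged by the TODO.

```python
# Hypothetical sketch of the column mapping behind the sort branches above.
FIXED_COLUMNS = ["submission label", "user full name", "group name", "metadataset site ID"]

def sort_target(sort_idx: int, metadata_names: list) -> str:
    if sort_idx < len(FIXED_COLUMNS):
        return FIXED_COLUMNS[sort_idx]
    # Metadata columns follow the fixed ones, hence `sort_idx - 4` in the view code.
    return metadata_names[sort_idx - len(FIXED_COLUMNS)]

print(sort_target(1, ["Date", "FASTQ R1"]))  # user full name
print(sort_target(5, ["Date", "FASTQ R1"]))  # FASTQ R1
```
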
6 changes: 6 additions & 0 deletions datameta/resource.py
@@ -24,6 +24,12 @@ def get_identifier(db_obj):
pass
return ids

def get_identifier_or_none(db_obj):
"""Given a database object, return the identifying IDs as a dictionary or None if the objecft is None"""
if db_obj is None:
return None
return get_identifier(db_obj)

def resource_query_by_id(db, model, idstring):
"""Returns a database query that returns an entity based on it's uuid or
site_id as specified by idstring.
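
A minimal usage sketch of the new helper, assuming an installed `datameta` package: it only guards `get_identifier()` against `None`, which is what lets the `file_ids` comprehensions above handle records without an attached file.

```python
# Minimal sketch: guard behaviour of the new helper.
from datameta.resource import get_identifier, get_identifier_or_none

assert get_identifier_or_none(None) is None
# For any non-None database object the two calls are equivalent:
#   get_identifier_or_none(record.file) == get_identifier(record.file)
```
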
6 changes: 4 additions & 2 deletions datameta/security/authz.py
@@ -30,7 +30,6 @@ def has_data_access(user, data_user_id, data_group_id=None, was_submitted=False):
(not was_submitted and data_user_id and data_user_id == user.id)
))


def view_apikey(user, target_user):
return user_is_target(user, target_user)

@@ -76,6 +75,9 @@ def submit_mset(user, mds_obj):
def delete_mset(user, mdata_set):
return user.id == mdata_set.user_id

def view_mset_any(user):
return user.site_read

def view_mset(user, mds_obj):
was_submitted = bool(mds_obj.submission_id is not None)
group_id = mds_obj.submission.group_id if was_submitted else None
@@ -107,4 +109,4 @@ def update_user_name(user, target_user):
return any((
has_group_rights(user, target_user.group),
user_is_target(user, target_user)
))
))
9 changes: 9 additions & 0 deletions datameta/static/css/datameta.css
@@ -2,4 +2,13 @@
color: inherit;
text-decoration: inherit;
}
.text-accent {
color:#dc3545 !important;
}
.background-accent {
color:#ffc107 !important;
}
.border-accent {
border-color:#ffc107 !important;
}

4 changes: 2 additions & 2 deletions datameta/static/js/submit.js
@@ -226,7 +226,7 @@ DataMeta.submit.buildKeyColunns = function(fileKeys, keys) {
if (fileKeys.includes(key)) {
/* FILE VALUES */
columns.push({ title:key, data:null, render:function(metadataset) {
var str = metadataset.record[key] === null ? '<span class="text-black-50"><i>empty</i></span>' : metadataset.record[key];
var str = metadataset.record[key].value === null ? '<span class="text-black-50"><i>empty</i></span>' : metadataset.record[key].value;
return '<span data-datameta-class="field-status" data-datameta-uuid="'+metadataset.id.uuid+'" data-datameta-field="'+key+'">' +
'<i class="bi bi-hdd-rack-fill text-secondary" data-datameta-class="status-none" style="display:inline"></i>' +
'<i class="bi bi-hdd-rack-fill text-danger" data-datameta-class="status-err" style="display:none"></i>' +
@@ -237,7 +237,7 @@ DataMeta.submit.buildKeyColunns = function(fileKeys, keys) {
} else {
/* NON-FILE VALUES */
columns.push({ title:key, data:"record", render: function(record) {
var str = record[key] === null ? '<span class="text-black-50"><i>empty</i></span>' : record[key];
var str = record[key].value === null ? '<span class="text-black-50"><i>empty</i></span>' : record[key].value;
return str;
}});
}
38 changes: 26 additions & 12 deletions datameta/static/js/view.js
@@ -19,13 +19,21 @@
DataMeta.view = {}

DataMeta.view.buildColumns = function(mdata_names) {
return mdata_names.map(function(mdata_name) {
return {
title : mdata_name,
data : null,
render : mdataset => mdataset.record[mdata_name] ? mdataset.record[mdata_name] : "<em>empty</em>"
};
});
return mdata_names.map(function(mdata_name) {
return {
title : mdata_name,
data : null,
render : function(mdataset, type, row, meta) {
console.log(mdataset)
// Special case NULL
if (mdataset.record[mdata_name] === null) return '<span class="text-black-50"><i>empty</i></span>';
// Special case file
if (mdataset.fileIds[mdata_name]) return '<a class="link-bare" href="' + DataMeta.api('rpc/get-file-url/'+mdataset.fileIds[mdata_name].site) +'"><i class="bi bi-cloud-arrow-down-fill"></i> '+mdataset.record[mdata_name]+'</a>';
// All other cases
return mdataset.record[mdata_name];
}
};
});
}

DataMeta.view.initTable = function() {
@@ -44,11 +52,17 @@ DataMeta.view.initTable = function() {
var mdata_names = json.map(record => record.name);

var columns = [
{ title: "Submission", data: "submissionId.site", className: "id_col"},
{ title: "Label", data: "submissionLabel", className: "id_col"},
{ title: "User", data: "userId.site", className: "id_col"},
{ title: "Group", data: "groupId.site", className: "id_col"},
{ title: "Metadataset", data: "id.site", className: "id_col"}
{ title: "Submission", data: null, className: "id_col", render: function(data) {
var label = data.submissionLabel ? data.submissionLabel : '<span class="text-black-50"><i>empty</i></span>';
return '<div> <div class="large-super">' + label + '</div><div class="text-accent small-sub">' + data.submissionId.site + '</div></div>'
}},
{ title: "User", data: null, className: "id_col", render: data =>
'<div> <div class="large-super">'+data.userName+'</div><div class="text-accent small-sub">'+data.userId.site+'</div></div>'
},
{ title: "Group", data: null, className: "id_col", render: data =>
'<div> <div class="large-super">'+data.groupName+'</div><div class="text-accent small-sub">'+data.groupId.site+'</div></div>'
},
{ title: "Metadataset", data: "id.site", className: "id_col", render: data => '<span class="text-accent">' + data + '</span>'}
].concat(DataMeta.view.buildColumns(mdata_names))

// Build table based on field names
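
For illustration, the download link built by the render function above can be described as follows. This is a Python sketch only: the API base URL and identifier values are hypothetical, while the `rpc/get-file-url/<site ID>` path is taken from the JavaScript.

```python
# Mirrors the link construction in DataMeta.view.buildColumns above; illustration only.
def download_url(api_base: str, file_identifier: dict) -> str:
    return f"{api_base}/rpc/get-file-url/{file_identifier['site']}"

print(download_url("https://datameta.example/api", {"site": "f-0001", "uuid": "9a2b4c6d"}))
# https://datameta.example/api/rpc/get-file-url/f-0001
```
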
2 changes: 1 addition & 1 deletion datameta/templates/layout.pt
@@ -97,7 +97,7 @@
</head>
<body class="d-flex flex-column h-100" onload=init_widescreen()>

<header class="sticky-top d-flex flex-column flex-md-row align-items-center p-3 px-md-4 mb-3 bg-body border-bottom border-warning">
<header class="sticky-top d-flex flex-column flex-md-row align-items-center p-3 px-md-4 mb-3 bg-body border-bottom border-accent">
<div tal:replace="structure logo_html"></div>

<nav class="my-2 my-md-0 me-md-3">
11 changes: 10 additions & 1 deletion datameta/templates/view.pt
@@ -10,7 +10,16 @@
<link href="${request.static_url('datameta:static/css/dataTables.bootstrap5.css')}" rel="stylesheet">
<style>
td.id_col {
background-color: #f7d4da;
padding-bottom:0pt;
}
.large-super {
line-height: 1.0;
margin-top: .25rem;
}
.small-sub {
line-height: 1.0;
font-size:6pt;
margin-top:0.1rem;
}
</style>
</metal:block>