+
+
{% endblock %}
diff --git a/curation_tracker/urls.py b/curation_tracker/urls.py
index e6b265f7..3385ce6d 100644
--- a/curation_tracker/urls.py
+++ b/curation_tracker/urls.py
@@ -6,6 +6,6 @@
path('curation_tracker/l2_curation', views.browse_l2_waiting, name='L2 Curation'),
path('curation_tracker/release_ready', views.browse_release_ready, name='Release Ready'),
# e.g. /upload/
- path("validate_metadata/", views.validate_metadata_template, name="Metadata Template Validation"),
+ path("validate_metadata_legacy/", views.validate_metadata_template, name="Metadata Template Validation"),
path('curation_tracker/stats/', views.stats, name='Curation Stats')
]
diff --git a/pgs_web/settings.py b/pgs_web/settings.py
index 3782389c..545c3b3a 100644
--- a/pgs_web/settings.py
+++ b/pgs_web/settings.py
@@ -55,10 +55,11 @@
# Application definition #
#------------------------#
INSTALLED_APPS = [
- 'catalog.apps.CatalogConfig',
+ 'catalog.apps.CatalogConfig',
'rest_api.apps.RestApiConfig',
'search.apps.SearchConfig',
'benchmark.apps.BenchmarkConfig',
+ 'validator.apps.ValidatorConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
diff --git a/pgs_web/urls.py b/pgs_web/urls.py
index 3709dd12..5946617c 100755
--- a/pgs_web/urls.py
+++ b/pgs_web/urls.py
@@ -18,9 +18,10 @@
from search import views as search_views
urlpatterns = [
- path('', include('catalog.urls')),
+ path('', include('catalog.urls')),
path('', include('rest_api.urls')),
path('', include('benchmark.urls')),
+ path('', include('validator.urls')),
re_path(r'^search/', search_views.search, name="PGS Catalog Search"),
re_path(r'^autocomplete/', search_views.autocomplete, name="PGS Catalog Autocomplete")
]
diff --git a/validator/__init__.py b/validator/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/validator/admin.py b/validator/admin.py
new file mode 100644
index 00000000..8c38f3f3
--- /dev/null
+++ b/validator/admin.py
@@ -0,0 +1,3 @@
+from django.contrib import admin
+
+# Register your models here.
diff --git a/validator/apps.py b/validator/apps.py
new file mode 100644
index 00000000..49e5efce
--- /dev/null
+++ b/validator/apps.py
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+
+
+class ValidatorConfig(AppConfig):
+ default_auto_field = 'django.db.models.BigAutoField'
+ name = 'validator'
diff --git a/validator/migrations/__init__.py b/validator/migrations/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/validator/models.py b/validator/models.py
new file mode 100644
index 00000000..71a83623
--- /dev/null
+++ b/validator/models.py
@@ -0,0 +1,3 @@
+from django.db import models
+
+# Create your models here.
diff --git a/validator/static/validator/js/metadata_consumer.js b/validator/static/validator/js/metadata_consumer.js
new file mode 100644
index 00000000..d055e7f9
--- /dev/null
+++ b/validator/static/validator/js/metadata_consumer.js
@@ -0,0 +1,123 @@
+const pyworker = await import((on_gae)?'./py-worker.min.js':'./py-worker.js');
+const asyncRun = pyworker.asyncRun;
+
+const validate_metadata = await fetch(new URL('../python/bin/validation_metadata.py', import.meta.url, null)).then(response => response.text());
+let dirHandle;
+
+
+async function validateFile() {
+ const fileInput = document.getElementById('myfile');
+ const file = fileInput.files[0];
+
+ if (file) {
+ const spinner = document.getElementById('pgs_loading');
+ spinner.style.visibility = "visible";
+
+ const reader = new FileReader();
+ reader.onload = async function(event) {
+ const fileContent = new Uint8Array(event.target.result);
+ let context = {
+ file_content: fileContent,
+ file_name: file.name,
+ dirHandle: dirHandle
+ }
+ const { results, error } = await asyncRun(validate_metadata, context);
+ if(results){
+ console.log(results);
+ spinner.style.visibility = "hidden";
+ showResults(results);
+ }
+ if(error){
+ console.error(error);
+ spinner.style.visibility = "hidden";
+ showSystemError(error);
+ }
+ };
+ reader.readAsArrayBuffer(file);
+ }
+}
+
+function report_items_2_html(reports_list) {
+  let report = '<ul>';
+  $.each(reports_list, function(index, report_item){
+    let lines = '';
+    if (report_item.lines) {
+      let lines_label = (report_item.lines.length > 1) ? 'Lines' : 'Line';
+      lines = lines_label+": "+report_item.lines.join(',')+ ' → ';
+    }
+    let message = report_item.message;
+    // Value highlighting
+    message = message.replace(/"(.+?)"/g, "\"<b>$1</b>\"");
+    // Leading space
+    message = message.replace(/"<b>\s+/g, "\"<b>_");
+    // Trailing space
+    message = message.replace(/\s+<\/b>"/g, "_</b>\"");
+    // Column highlighting
+    message = message.replace(/'(.+?)'/g, "'<b>$1</b>'");
+    report += "<li>"+lines+message+"</li>";
+  });
+  report += '</ul>';
+  return report;
+}
+
+function makeReportTable(data_spreadsheet_items, items_header){
+  let table_html = '<table>'+
+                   '<thead><tr><th>Spreadsheet</th><th>'+items_header+'</th></tr></thead>'+
+                   '<tbody>';
+  $.each(data_spreadsheet_items, function(spreadsheet, reports_list){
+    table_html += "<tr><td>"+spreadsheet+"</td><td>";
+    table_html += report_items_2_html(reports_list);
+    table_html += '</td></tr>';
+  });
+  table_html += '</tbody></table>';
+  return table_html;
+}
+
+function showResults(results){
+  let data = JSON.parse(results);
+  let status_style = (data.status === 'failed') ? '<b>Failed</b>' : '<b>Passed</b>';
+  let status_html = '<table>'+
+                    '<tr><td>File validation</td><td>'+status_style+'</td></tr>'+
+                    '</table>';
+  $('#check_status').html(status_html);
+  // Error messages
+  if (data.error) {
+    let report = '<h4>Error report</h4>'
+               + makeReportTable(data.error, 'Error message(s)');
+    $('#report_error').html(report);
+  } else {
+    $('#report_error').html('');
+  }
+  // Warning messages
+  if (data.warning) {
+    let report = '<h4>Warning report</h4>'
+               + makeReportTable(data.warning, 'Warning message(s)');
+    $('#report_warning').html(report);
+  } else {
+    $('#report_warning').html('');
+  }
+  // Other messages
+  if (data.messages){
+    let report = '';
+    $.each(data.messages, function (index, message){
+      report = report + '<div>'+message+'</div>'+"\n";
+    })
+    $('#report_messages').html(report);
+  }
+}
+
+function showSystemError(errors){
+  let status_html = '<div>File validation: <b>Failed</b></div>';
+  $('#check_status').html(status_html);
+  let error_msg = (errors && errors !== '') ? errors : 'Internal error';
+  let error_html = '<div>'+
+                   '<h4>Error report</h4>'+
+                   '<div><b>Error:</b> '+error_msg+'</div>'+
+                   '</div>';
+  $('#report_error').html(error_html);
+}
+
+document.querySelector('#upload_btn').addEventListener('click', async () => {
+ await validateFile();
+});
diff --git a/validator/static/validator/js/py-worker.js b/validator/static/validator/js/py-worker.js
new file mode 100644
index 00000000..fa972dcf
--- /dev/null
+++ b/validator/static/validator/js/py-worker.js
@@ -0,0 +1,32 @@
+// Adapted from https://github.com/EBISPOT/gwas-sumstats-tools-ssf-morph
+// This script is setting up a way to run Python scripts asynchronously in a web worker. It sends the Python script to the worker and sets up a callback to handle the result when the worker has finished executing the script.
+const pyodideWorker = new Worker(new URL((on_gae) ? "webworker.min.js" : "webworker.js", import.meta.url, null));
+
+const callbacks = {};
+
+pyodideWorker.onmessage = (event) => {
+ const { id, ...data } = event.data;
+ const onSuccess = callbacks[id];
+ delete callbacks[id];
+ onSuccess(data);
+};
+//This id is incremented each time the function is invoked and is kept within the safe integer limit.
+
+
+const asyncRun = (() => {
+ let id = 0; // identify a Promise
+ return (script, context) => {
+ // the id could be generated more carefully
+ id = (id + 1) % Number.MAX_SAFE_INTEGER;
+ return new Promise((onSuccess) => {
+ callbacks[id] = onSuccess;
+ pyodideWorker.postMessage({
+ ...context,
+ python: script,
+ id,
+ });
+ });
+ };
+})();
+
+export { asyncRun };
\ No newline at end of file
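For reference, a minimal usage sketch of asyncRun as the consumer scripts below call it (the Python source and context keys here are illustrative, and on_gae must already be defined globally before this module is imported):

    const { results, error } = await asyncRun(
      'import json\njson.dumps({"status": "success"})',  // Python source executed by the Pyodide worker
      { file_name: 'example.xlsx' }                       // extra context keys are copied onto the worker's scope
    );
    if (results) console.log(JSON.parse(results));        // value returned by runPythonAsync (a JSON string here)
    if (error)   console.error(error);                    // error message propagated from the worker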
diff --git a/validator/static/validator/js/scores_consumer.js b/validator/static/validator/js/scores_consumer.js
new file mode 100644
index 00000000..c78ace99
--- /dev/null
+++ b/validator/static/validator/js/scores_consumer.js
@@ -0,0 +1,118 @@
+const pyworker = await import((on_gae)?'./py-worker.min.js':'./py-worker.js');
+const asyncRun = pyworker.asyncRun;
+
+const validate_scores = await fetch(new URL('../python/bin/validation_scores.py', import.meta.url, null)).then(response => response.text());
+
+let dirHandle;
+let validateFileHandle;
+
+function toggleLoading(on){
+ const spinner = document.getElementById('pgs_loading');
+ spinner.style.visibility = on ? "visible" : "hidden";
+}
+
+function successMount(dirName){
+ document.getElementById('grant_message').innerHTML = 'Authorization granted on directory \"'+dirName+"\".";
+}
+
+
+async function mountLocalDirectory() {
+  // Ask the user to select the local directory that contains the scoring files to validate
+ dirHandle = await showDirectoryPicker();
+
+ if ((await dirHandle.queryPermission({ mode: "read" })) !== "granted") {
+ if (
+ (await dirHandle.requestPermission({ mode: "read" })) !== "granted"
+ ) {
+      throw Error("Unable to read directory");
+ }
+ }
+ return dirHandle.name;
+}
+
+async function validation(validateFileHandle) {
+ let context;
+ if (!validateFileHandle) {
+ console.log('No single scoring file defined');
+ context = {
+ dirHandle: dirHandle,
+ outputFileName: null,
+ };
+ } else {
+ context = {
+ dirHandle: dirHandle,
+ outputFileName: validateFileHandle.name,
+ };
+ }
+
+ try {
+ const { results, error } = await asyncRun(validate_scores, context);
+ if (results) {
+ let data = JSON.parse(results);
+ if(data.status === 'success'){
+ validation_out.value = data.response;
+ console.log("pyodideWorker return results: ", data.response);
+ } else if (data.status === 'error'){
+ validation_out.value = '';
+ console.error("pyodideWorker returned error: ", data.error);
+ appendAlertToElement("error",'Error: '+data.error,'danger')
+ }
+ return results;
+ } else if (error) {
+ validation_out.value = '';
+ console.log("pyodideWorker error: ", error);
+ appendAlertToElement("error",'Error: '+error,'danger')
+ }
+ } catch (e) {
+ validation_out.value =`Error in pyodideWorker at ${e.filename}, Line: ${e.lineno}, ${e.message}`;
+ console.log(
+ `Error in pyodideWorker at ${e.filename}, Line: ${e.lineno}, ${e.message}`,
+ );
+ }
+}
+
+async function appendAlertToElement(elementId, message, type) {
+ const alertPlaceholder = document.getElementById(elementId);
+ if (!alertPlaceholder) {
+ console.error("Element with ID '" + elementId + "' not found.");
+ return;
+ }
+
+ const wrapper = document.createElement('div');
+  wrapper.innerHTML = [
+    `<div class="alert alert-${type}" role="alert">`,
+    `  <div>${message}</div>`,
+    '  <div>Please contact the PGS-Catalog support if the problem persists.</div>',
+    '</div>'
+  ].join('');
+
+ alertPlaceholder.append(wrapper);
+}
+
+document.querySelector('#validate_directory').addEventListener('click', async () => {
+ validation_out.value = "Initializing validation...\n";
+ toggleLoading(true);
+ await validation(null);
+ toggleLoading(false);
+});
+
+document.querySelector('#mountvalidate').addEventListener('click', async () => {
+ if (!('showDirectoryPicker' in window)) {
+ alert('Your browser does not support the File System Access API. Please use a supported browser.');
+ return; // Stop execution if the API is not supported
+ }
+ else {
+ let dirName = await mountLocalDirectory();
+ successMount(dirName);
+ document.querySelector('#validate_single').disabled = false;
+ document.querySelector('#validate_directory').disabled = false;
+ }
+});
+
+document.querySelector('#validate_single').addEventListener('click', async () => {
+ [validateFileHandle] = await window.showOpenFilePicker();
+ toggleLoading(true);
+ await validation(validateFileHandle);
+ toggleLoading(false);
+});
diff --git a/validator/static/validator/js/webworker.js b/validator/static/validator/js/webworker.js
new file mode 100644
index 00000000..8cecac26
--- /dev/null
+++ b/validator/static/validator/js/webworker.js
@@ -0,0 +1,56 @@
+// webworker.js
+// Adapted from https://github.com/EBISPOT/gwas-sumstats-tools-ssf-morph
+
+// Setup your project to serve `py-worker.js`. You should also serve
+// `pyodide.js`, and all its associated `.asm.js`, `.json`,
+// and `.wasm` files as well:
+importScripts("https://cdn.jsdelivr.net/pyodide/v0.26.0/full/pyodide.js");
+
+const wheels_base_url = "/static/validator/python/wheels/";
+
+async function loadPyodideAndPackages() {
+ self.pyodide = await loadPyodide();
+ await pyodide.loadPackage("micropip");
+ const micropip = pyodide.pyimport("micropip");
+ await micropip.install(['openpyxl','requests','httpx==0.26.0','tenacity','pyliftover',
+ 'xopen==1.8.0','zstandard','tqdm','natsort','pandas','pandas-schema']);
+  // The second positional argument maps to micropip's keep_going parameter
+  await micropip.install(wheels_base_url+"pgs_template_validator-1.1.2-py3-none-any.whl", true);
+  await micropip.install(wheels_base_url+"pgscatalog_validate-0.1-py3-none-any.whl", true);
+ await pyodide.FS.createLazyFile('/home/pyodide/', 'TemplateColumns2Models.xlsx',
+ '/static/validator/template/TemplateColumns2Models.xlsx', true, false);
+}
+let pyodideReadyPromise = loadPyodideAndPackages();
+
+
+//This event is fired when the worker receives a message from the main thread via the postMessage method.
+self.onmessage = async (event) => {
+ // make sure loading is done
+ await pyodideReadyPromise;
+ // Don't bother yet with this line, suppose our API is built in such a way:
+ const { id, python, ...context } = event.data;
+ // The worker copies the context in its own "memory" (an object mapping name to values)
+ for (const key of Object.keys(context)) {
+ self[key] = context[key];
+ }
+ // Now is the easy part, the one that is similar to working in the main thread:
+ try {
+ await self.pyodide.loadPackagesFromImports(python);
+    // mount the local directory and keep the nativefs handle as a global variable.
+ if (! self.fsmounted && self.dirHandle){
+ self.nativefs = await self.pyodide.mountNativeFS("/data", self.dirHandle);
+ self.fsmounted = true;
+ }
+ // run python script
+ self.pyodide.globals.set('print', s => console.log(s))
+ let results = await self.pyodide.runPythonAsync(python);
+ // flush new files to disk
+ if(self.nativefs){
+ await self.nativefs.syncfs();
+ }
+
+ self.postMessage({ results, id });
+ } catch (error) {
+ console.log(error);
+ self.postMessage({ error: error.message, id });
+ }
+ };
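The directory-mounting step above is what lets the Python scripts see local files; a small sketch of the flow (pythonSource stands in for one of the validation scripts' source text, fetched as in the consumer scripts):

    const dirHandle = await window.showDirectoryPicker();            // user grants read access to a local directory
    const { results } = await asyncRun(pythonSource, { dirHandle }); // webworker.js mounts the handle at /data
    // Inside the Python script, Path("/data") then lists the files of the selected directory,
    // and nativefs.syncfs() flushes any files written there back to disk.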
diff --git a/validator/static/validator/python/bin/validation_metadata.py b/validator/static/validator/python/bin/validation_metadata.py
new file mode 100644
index 00000000..d5d6f2e4
--- /dev/null
+++ b/validator/static/validator/python/bin/validation_metadata.py
@@ -0,0 +1,120 @@
+import io
+import js
+import json
+from pyodide.http import open_url
+from validator.main_validator import PGSMetadataValidator
+from validator.request.connector import Connector, UnknownError, NotFound, Logger, ServiceNotWorking
+
+# Need a proxy for OLS as Pyodide causes a cross-origin issue with the OLS url
+OLS_URL = "https://ols-proxy-dot-pgs-catalog.appspot.com/ols-proxy/efo/%s"
+
+# If fetching a GWAS study fails, we try to fetch the following known ID as a positive test,
+# to determine whether the error is due to an incorrect ID or to the service being down. Both cases return 404.
+TEST_GCST = 'GCST90132222'
+
+file = io.BytesIO(bytes(js.file_content))
+file_name = js.file_name
+
+
+class PyodideLogger(Logger):
+ def debug(self, message, name):
+ print(f'ERROR: {message}')
+
+
+class PyodideConnector(Connector):
+ """This customised connector is necessary as the 'requests' python module is not supported in WebAssembly.
+ Moreover, the requests for EFO traits must be redirected to a proxy to avoid cross-origin errors."""
+
+    # If a GWAS request returns 404, we try again with a test ID. If the test ID also returns 404, this attribute is set to True.
+ gwas_is_down = False
+
+ def __init__(self):
+ super().__init__(logger=PyodideLogger())
+
+ def request(self, url, payload=None) -> dict:
+ if payload:
+ query = '&'.join([f"{k}={v}" for k, v in payload.items()])
+ url = url + '?' + query
+ # Using pyodide open_url instead of python requests.get()
+ query_result_io = open_url(url)
+ query_result = query_result_io.read()
+
+ result_json = json.loads(query_result)
+ return result_json
+
+ def get_efo_trait(self, efo_id) -> dict:
+ url = OLS_URL % efo_id.replace('_', ':')
+ response = self.request(url)
+ # If not found the response should return 404.
+ if '_embedded' in response and 'terms' in response['_embedded'] and len(response['_embedded']['terms']) == 1:
+ return response['_embedded']['terms'][0]
+ elif 'status' in response and response['status'] == 404:
+ raise NotFound(message=response['error'], url=url)
+ else:
+ raise UnknownError(message="Unexpected response from URL: %s" % url, url=url)
+
+ def get_gwas(self, gcst_id) -> dict:
+ try:
+ return super().get_gwas(gcst_id)
+ except Exception as e:
+ try:
+ super().get_gwas(TEST_GCST)
+ except Exception:
+ self.gwas_is_down = True
+ raise ServiceNotWorking()
+ raise e
+
+
+def validate():
+ pyodide_connector = PyodideConnector()
+ metadata_validator = PGSMetadataValidator(file, False, pyodide_connector)
+ metadata_validator.template_columns_schema_file = '/home/pyodide/TemplateColumns2Models.xlsx'
+ metadata_validator.parse_spreadsheets()
+ metadata_validator.parse_publication()
+ metadata_validator.parse_scores()
+ metadata_validator.parse_cohorts()
+ metadata_validator.parse_performances()
+ metadata_validator.parse_samples()
+ metadata_validator.post_parsing_checks()
+
+ report_text = 'No error'
+
+ response = {}
+
+ status = 'success'
+ if metadata_validator.report['error']:
+ status = 'failed'
+ response['error'] = {}
+ error_report = metadata_validator.report['error']
+ for error_spreadsheet in error_report:
+ response['error'][error_spreadsheet] = []
+ for error_msg in error_report[error_spreadsheet]:
+ error_entry = {'message': error_msg}
+            if error_report[error_spreadsheet][error_msg][0] is not None:
+ error_entry['lines'] = error_report[error_spreadsheet][error_msg]
+ response['error'][error_spreadsheet].append(error_entry)
+
+ if metadata_validator.report['warning']:
+ response['warning'] = {}
+ warning_report = metadata_validator.report['warning']
+ for warning_spreadsheet in warning_report:
+ response['warning'][warning_spreadsheet] = []
+ for warning_msg in warning_report[warning_spreadsheet]:
+ warning_entry = {'message': warning_msg}
+            if warning_report[warning_spreadsheet][warning_msg][0] is not None:
+ warning_entry['lines'] = warning_report[warning_spreadsheet][warning_msg]
+ response['warning'][warning_spreadsheet].append(warning_entry)
+
+ response['status'] = status
+
+ if pyodide_connector.gwas_is_down:
+ response['messages'] = [
+ 'Error: GWAS service seems down. Please retry validation later.'
+ ]
+
+ return response
+
+
+response = validate()
+
+json.dumps(response) # Is returned by pyodide.runPythonAsync()
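A sketch of the JSON string this script returns, as parsed by showResults() in metadata_consumer.js (spreadsheet names, messages and line numbers are illustrative):

    const exampleMetadataResult = {
      status: 'failed',                 // 'success' or 'failed'
      error: {                          // present when errors were reported, keyed by spreadsheet name
        'Score(s)': [ { message: 'Missing value in "trait_reported"', lines: [3, 5] } ]
      },
      warning: {                        // same structure as 'error', optional
        'Performance Metrics': [ { message: "Unknown column 'example'" } ]
      },
      messages: [                       // optional, e.g. when the GWAS Catalog lookup appears to be down
        'Error: GWAS service seems down. Please retry validation later.'
      ]
    };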
diff --git a/validator/static/validator/python/bin/validation_scores.py b/validator/static/validator/python/bin/validation_scores.py
new file mode 100644
index 00000000..755376f3
--- /dev/null
+++ b/validator/static/validator/python/bin/validation_scores.py
@@ -0,0 +1,79 @@
+import glob
+import json
+import tempfile
+import os.path
+from pathlib import Path
+
+from js import outputFileName
+# from pgscatalog.validate.cli.validate_scorefile import validate_scorefile
+from pgscatalog.validate.cli.validate_scorefile import _check_args, _validate_scorefile
+
+
+# local file system is mounted in /data
+#input_path = Path("/data") / outputFileName
+
+
+class Args:
+ dir: str
+ log_dir: str
+ t: str
+ f: str
+ score_dir: str
+ check_filename: bool
+
+
+response = ''
+error = None
+
+try:
+
+ # At the moment the results of score validation are stored in individual log files in log_dir
+ with tempfile.TemporaryDirectory() as log_dir:
+
+ args = Args()
+ args.t = 'formatted'
+ args.check_filename = False
+ args.dir = None
+ args.f = None
+ args.log_dir = Path(log_dir)
+ args.score_dir = None
+
+ if outputFileName:
+ filename = str(Path("/data") / outputFileName)
+ if not os.path.exists(filename):
+ raise FileNotFoundError(filename)
+ args.f = filename
+ else:
+ args.dir = str(Path("/data"))
+
+ # Unconventional use of private functions but temporary
+ _check_args(args)
+ _validate_scorefile(args)
+
+ # Getting the validation results from the log files
+ for log_file in glob.glob(str(log_dir)+'/*_log.txt'):
+ file_name = log_file.split('/')[-1].removesuffix('_log.txt')
+ with open(log_file, 'r') as f:
+ content = f.read()
+ response = response + file_name + ":\n"
+ response = response + content + "\n"
+
+except FileNotFoundError as e:
+ error = "Could not read input file. Is the selected file in the directory with granted rights?"
+except Exception as e:
+ error = str(e)
+
+
+data = {}
+if error:
+ data = {
+ 'status': 'error',
+ 'error': error
+ }
+else:
+ data = {
+ 'status': 'success',
+ 'response': response
+ }
+
+json.dumps(data) # Is returned by pyodide.runPythonAsync()
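Likewise, a sketch of what this script hands back to validation() in scores_consumer.js (file name and log text are illustrative):

    const exampleScoresResult = {
      status: 'success',                                          // 'success' or 'error'
      response: 'my_score_file:\n...validation log content...\n'  // concatenated per-file logs from log_dir
    };
    // On failure: { status: 'error', error: 'Could not read the input file. ...' }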
diff --git a/validator/static/validator/python/wheels/pgs_template_validator-1.1.2-py3-none-any.whl b/validator/static/validator/python/wheels/pgs_template_validator-1.1.2-py3-none-any.whl
new file mode 100644
index 00000000..c57763c7
Binary files /dev/null and b/validator/static/validator/python/wheels/pgs_template_validator-1.1.2-py3-none-any.whl differ
diff --git a/validator/static/validator/python/wheels/pgscatalog_validate-0.1-py3-none-any.whl b/validator/static/validator/python/wheels/pgscatalog_validate-0.1-py3-none-any.whl
new file mode 100644
index 00000000..41041c25
Binary files /dev/null and b/validator/static/validator/python/wheels/pgscatalog_validate-0.1-py3-none-any.whl differ
diff --git a/validator/static/validator/template/TemplateColumns2Models.xlsx b/validator/static/validator/template/TemplateColumns2Models.xlsx
new file mode 100644
index 00000000..8957f4f0
Binary files /dev/null and b/validator/static/validator/template/TemplateColumns2Models.xlsx differ
diff --git a/validator/templates/validator/validate_metadata_client.html b/validator/templates/validator/validate_metadata_client.html
new file mode 100644
index 00000000..23e86837
--- /dev/null
+++ b/validator/templates/validator/validate_metadata_client.html
@@ -0,0 +1,60 @@
+{% extends 'catalog/base.html' %}
+{% load static %}
+
+{% block title %}Metadata Template Validation{% endblock %}
+
+{% block desc %}
+  <script>const on_gae = {% if is_pgs_app_on_gae %}true{% else %}false{% endif %};</script>
+  {% if is_pgs_app_on_gae %}
+    <script type="module" src="{% static 'validator/js/metadata_consumer.min.js' %}"></script>
+  {% else %}
+    <script type="module" src="{% static 'validator/js/metadata_consumer.js' %}"></script>
+  {% endif %}
+{% endblock %}
+
+{% block content %}
+  <div>
+
+    <h2>Metadata Template Validation</h2>
+
+    <div>
+      <input type="file" id="myfile" name="myfile" accept=".xlsx"/>
+      <button type="button" id="upload_btn">Upload</button>
+    </div>
+
+    <div>
+      More information about the Template file
+      here.
+    </div>
+
+    <div id="pgs_loading" style="visibility:hidden">
+      Data validation ...
+    </div>
+
+    <div id="check_status"></div>
+    <div id="report_error"></div>
+    <div id="report_warning"></div>
+    <div id="report_messages"></div>
+
+    <div style="display:none">
+      The file extension is not xlsx.
+    </div>
+
+  </div>
+{% endblock %}
diff --git a/validator/templates/validator/validate_scoring_files_client.html b/validator/templates/validator/validate_scoring_files_client.html
new file mode 100644
index 00000000..37b75bfb
--- /dev/null
+++ b/validator/templates/validator/validate_scoring_files_client.html
@@ -0,0 +1,52 @@
+{% extends 'catalog/base.html' %}
+{% load static %}
+
+{% block title %}Scoring Files Validation{% endblock %}
+
+{% block desc %}
+  <script>const on_gae = {% if is_pgs_app_on_gae %}true{% else %}false{% endif %};</script>
+  {% if is_pgs_app_on_gae %}
+    <script type="module" src="{% static 'validator/js/scores_consumer.min.js' %}"></script>
+  {% else %}
+    <script type="module" src="{% static 'validator/js/scores_consumer.js' %}"></script>
+  {% endif %}
+{% endblock %}
+
+{% block content %}
+  <div>
+
+    <h2>Scoring Files Validation <sup>beta</sup></h2>
+
+    <div>
+      1) Please grant permission to read the local directory that contains your scores to validate:
+      <button type="button" id="mountvalidate">Select directory</button>
+      <span id="grant_message"></span>
+    </div>
+
+    <div>
+      2) Then click on <button type="button" id="validate_single" disabled>Validate one file</button> to select only one file to validate in this directory,
+      or click on <button type="button" id="validate_directory" disabled>Validate all files</button> to validate all scoring files located in this directory.
+    </div>
+
+    <div id="pgs_loading" style="visibility:hidden">Data validation ...</div>
+
+    <div id="error"></div>
+
+    <textarea id="validation_out" rows="15" cols="120" readonly></textarea>
+
+  </div>
+{% endblock %}
diff --git a/validator/tests.py b/validator/tests.py
new file mode 100644
index 00000000..7ce503c2
--- /dev/null
+++ b/validator/tests.py
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/validator/urls.py b/validator/urls.py
new file mode 100644
index 00000000..cc6a2bac
--- /dev/null
+++ b/validator/urls.py
@@ -0,0 +1,7 @@
+from django.urls import path
+from . import views
+
+urlpatterns = [
+ path("validate_metadata/", views.validate_metadata_template_client, name="metadata_template_validation"),
+ path("labs/validate_scoring_files/", views.validate_scoring_files_client, name="scoring_files_validation"),
+]
\ No newline at end of file
diff --git a/validator/views.py b/validator/views.py
new file mode 100644
index 00000000..695c6e80
--- /dev/null
+++ b/validator/views.py
@@ -0,0 +1,9 @@
+from django.shortcuts import render
+
+
+def validate_metadata_template_client(request):
+ return render(request, 'validator/validate_metadata_client.html', {})
+
+
+def validate_scoring_files_client(request):
+ return render(request, 'validator/validate_scoring_files_client.html', {})