{% trans "Step 2:" %} {% trans "(On your own machine) Fill out grades." %}
+
{% trans "Open the CSV in a spreadsheet editor and assign scores to learners via “New Points” field. Leave scores that you don’t want to change blank." %}
+
+
{% trans "Step 3:" %} {% trans "Import scores." %}
+
{% trans "Upload the filled out CSV. Learners will immediately see their grades after import completes." %}
+
{% trans "Note: Supports file sizes up to 4MB." %}
diff --git a/build/lib/staff_graded/static/js/src/staff_graded.js b/build/lib/staff_graded/static/js/src/staff_graded.js
new file mode 100644
index 0000000..38d20e6
--- /dev/null
+++ b/build/lib/staff_graded/static/js/src/staff_graded.js
@@ -0,0 +1,109 @@
+(function() {
+ 'use strict'
+
+ function doneLoading(blockId, data) {
+ $(`#${blockId}-spinner`).hide();
+ if (data.error_rows.length || data.error_messages.length) {
+ var message = '';
+ if (data.error_rows.length) {
+ message += interpolate_text(
+ ngettext('{error_count} error. Please try again. ',
+ '{error_count} errors. Please try again. ',
+ data.error_rows.length),
+ { error_count: data.error_rows.length });
+ }
+ if (data.error_messages.length) {
+ message += ' ';
+ message += data.error_messages;
+ }
+ } else {
+ var message = interpolate_text(
+ ngettext('Processed {row_count} row. ',
+ 'Processed {row_count} rows. ',
+ data.total), { row_count:data.total }) +
+ interpolate_text(
+ ngettext('Updated scores for {row_count} learner.',
+ 'Updated scores for {row_count} learners.',
+ data.saved), { row_count: data.saved });
+ }
+ $(`#${blockId}-status`).show();
+ $(`#${blockId}-status .message`).html(message);
+ };
+
+ function pollResults(blockId, poll_url, result_id) {
+ $.ajax({
+ url: poll_url,
+ type: 'POST',
+ data: {result_id: result_id},
+ success: function(data) {
+ if (data.waiting) {
+ setTimeout(function(){
+ pollResults(blockId, poll_url, result_id);
+ }, 1000);
+ } else {
+ doneLoading(blockId, data);
+ }
+ }
+ });
+ };
+
+
+ this.StaffGradedProblem = function(runtime, element, json_args) {
+ var $element = $(element);
+ var fileInput = $element.find('.file-input');
+ var $exportButton = $element.find('.export-button');
+ fileInput.change(function(e){
+ var firstFile = this.files[0];
+ var self = this;
+ if (firstFile == undefined) {
+ return;
+ } else if (firstFile.size > 4194303) {
+ var message = gettext('Files must be less than 4MB. Please split the file into smaller chunks and upload again.');
+ $(`#${json_args.id}-status`).show();
+ $(`#${json_args.id}-status .message`).html(message);
+ return;
+ }
+ var formData = new FormData();
+ formData.append('csrfmiddlewaretoken', json_args.csrf_token);
+ formData.append('csv', firstFile);
+
+ $element.find('.filename').html(firstFile.name);
+ $element.find('.status').hide();
+ $element.find('.spinner').show();
+ $.ajax({
+ url : json_args.import_url,
+ type : 'POST',
+ data : formData,
+ processData: false, // tell jQuery not to process the data
+ contentType: false, // tell jQuery not to set contentType
+ success : function(data) {
+ self.value = '';
+ if (data.waiting) {
+ setTimeout(function() {
+ pollResults(json_args.id, json_args.poll_url, data.result_id);
+ }, 1000);
+ } else {
+ doneLoading(json_args.id, data);
+ }
+ }
+ });
+
+ });
+
+ $exportButton.click(function(e) {
+ e.preventDefault();
+ var url = $exportButton.attr('href') + '?' + $.param(
+ {
+ track: $element.find('.track-field').val(),
+ cohort: $element.find('.cohort-field').val()
+ }
+ );
+ location.href = url;
+ });
+
+ };
+
+ this.StaffGradedXBlock = function(runtime, element) {
+ };
+
+}).call(this);
diff --git a/setup.py b/setup.py
index ab5e889..61d2047 100644
--- a/setup.py
+++ b/setup.py
@@ -25,24 +25,96 @@ def package_data(pkg, roots):
def load_requirements(*requirements_paths):
"""
Load all requirements from the specified requirements files.
+
+ Requirements will include any constraints from files specified
+ with -c in the requirements files.
Returns a list of requirement strings.
"""
- requirements = set()
- for path in requirements_paths:
- with open(path, encoding='utf-8') as reqs:
- requirements.update(
- line.split('#')[0].strip() for line in reqs
- if is_requirement(line.strip())
+ # UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why.
+
+ # e.g. {"django": "Django", "confluent-kafka": "confluent_kafka[avro]"}
+ by_canonical_name = {}
+
+ def check_name_consistent(package):
+ """
+ Raise exception if package is named different ways.
+
+ This ensures that packages are named consistently so we can match
+ constraints to packages. It also ensures that if we require a package
+ with extras we don't constrain it without mentioning the extras (since
+        that too would interfere with matching constraints).
+ """
+ canonical = package.lower().replace('_', '-').split('[')[0]
+ seen_spelling = by_canonical_name.get(canonical)
+ if seen_spelling is None:
+ by_canonical_name[canonical] = package
+ elif seen_spelling != package:
+ raise Exception(
+ f'Encountered both "{seen_spelling}" and "{package}" in requirements '
+ 'and constraints files; please use just one or the other.'
+            )
- return list(requirements)
+
+ requirements = {}
+ constraint_files = set()
+
+ # groups "pkg<=x.y.z,..." into ("pkg", "<=x.y.z,...")
+ re_package_name_base_chars = r"a-zA-Z0-9\-_." # chars allowed in base package name
+ # Two groups: name[maybe,extras], and optionally a constraint
+ requirement_line_regex = re.compile(
+ r"([%s]+(?:\[[%s,\s]+\])?)([<>=][^#\s]+)?"
+ % (re_package_name_base_chars, re_package_name_base_chars)
+ )
+
+ def add_version_constraint_or_raise(current_line, current_requirements, add_if_not_present):
+ regex_match = requirement_line_regex.match(current_line)
+ if regex_match:
+ package = regex_match.group(1)
+ version_constraints = regex_match.group(2)
+ check_name_consistent(package)
+ existing_version_constraints = current_requirements.get(package, None)
+            # It's fine to add a constraint to a previously unconstrained
+            # package, but raise if a conflicting constraint already exists.
+ if existing_version_constraints and existing_version_constraints != version_constraints:
+ raise BaseException(f'Multiple constraint definitions found for {package}:'
+ f' "{existing_version_constraints}" and "{version_constraints}".'
+ f'Combine constraints into one location with {package}'
+ f'{existing_version_constraints},{version_constraints}.')
+ if add_if_not_present or package in current_requirements:
+ current_requirements[package] = version_constraints
+
+ # Read requirements from .in files and store the path to any
+ # constraint files that are pulled in.
+ for path in requirements_paths:
+ with open(path) as reqs:
+ for line in reqs:
+ if is_requirement(line):
+ add_version_constraint_or_raise(line, requirements, True)
+ if line and line.startswith('-c') and not line.startswith('-c http'):
+ constraint_files.add(os.path.dirname(path) + '/' + line.split('#')[0].replace('-c', '').strip())
+
+ # process constraint files: add constraints to existing requirements
+ for constraint_file in constraint_files:
+ with open(constraint_file) as reader:
+ for line in reader:
+ if is_requirement(line):
+ add_version_constraint_or_raise(line, requirements, False)
+
+    # process back into a list of "pkg<=x.y.z,..." constraint strings
+ constrained_requirements = [f'{pkg}{version or ""}' for (pkg, version) in sorted(requirements.items())]
+ return constrained_requirements
def is_requirement(line):
"""
- Return True if the requirement line is a package requirement;
- that is, it is not blank, a comment, a URL, or an included file.
+ Return True if the requirement line is a package requirement.
+
+ Returns:
+ bool: True if the line is not blank, a comment,
+ a URL, or an included file
"""
- return line and not line.startswith(('-r', '#', '-e', 'git+', '-c'))
+ # UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why
+
+ return line and line.strip() and not line.startswith(('-r', '#', '-e', 'git+', '-c'))
def get_version(file_path):