fix: make sure we're assessing the right submission (#2159)
* fix: make sure we're assessing the right submission

* chore: version bump
jansenk authored Jan 12, 2024
1 parent fedddab · commit f3bab90
Showing 5 changed files with 63 additions and 7 deletions.
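In short, the old check compared the client-supplied UUID against `get_peer_submission()`, which can check out a new submission, and it only ran when the client sent a UUID at all. The new check always runs: it resolves the reviewer's current submission with `get_active_assessment_submission()` (without pulling a new one) and raises `ServerClientUUIDMismatchException` when that submission is missing (e.g. the checked-out peer workflow item has expired) or does not match what the client thinks it is grading. Below is a minimal standalone sketch of that guard; everything except the method and exception names shown in the diff below is illustrative, not the real `PeerAssessmentAPI`.

import logging
from typing import Optional

logger = logging.getLogger(__name__)


class ServerClientUUIDMismatchException(Exception):
    """Raised when the client is not assessing the server's active submission."""


class PeerStepSketch:
    """Stand-in for the peer assessment step API; not the real ORA class."""

    def __init__(self, active_submission: Optional[dict]):
        self._active_submission = active_submission

    def get_active_assessment_submission(self) -> Optional[dict]:
        # The real API returns None when the checked-out peer workflow item
        # has expired or was never created.
        return self._active_submission

    def assert_assessing_valid_submission(self, uuid_client: Optional[str]) -> None:
        submission = self.get_active_assessment_submission()
        if submission is None:
            # No active submission (expired or never pulled one): reject.
            raise ServerClientUUIDMismatchException()
        uuid_server = submission.get("uuid")
        if uuid_server != uuid_client:
            # Client and server disagree: reject, as the pre-existing check did.
            logger.warning(
                "Irrelevant assessment submission: expected '%s', got '%s'",
                uuid_server,
                uuid_client,
            )
            raise ServerClientUUIDMismatchException()


# Before the fix the check ran only when the client sent a UUID; now it
# always runs, so a stale frontend can no longer grade the wrong answer.
step = PeerStepSketch(active_submission=None)
try:
    step.assert_assessing_valid_submission("some-client-uuid")
except ServerClientUUIDMismatchException:
    print("assessment rejected: no active submission for this reviewer")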
2 changes: 1 addition & 1 deletion openassessment/__init__.py
@@ -2,4 +2,4 @@
 Initialization Information for Open Assessment Module
 """
 
-__version__ = '6.0.24'
+__version__ = '6.0.25'
17 changes: 13 additions & 4 deletions openassessment/xblock/apis/assessments/peer_assessment_api.py
@@ -156,10 +156,20 @@ def get_peer_submission(self):
             logger.exception(err)
         return peer_submission
 
-    def assert_assessed_submission_uuid_matches(self, uuid_client):
-        submission = self.get_peer_submission() or {}
+    def assert_assessing_valid_submission(self, uuid_client):
+        """
+        Validate that the frontend and backend agree on which submission is being assessed
+        """
+        # Get the current active submission without pulling a new one
+        submission = self.get_active_assessment_submission()
+
+        # If there is no active submission (expired or never got one somehow), raise
+        if submission is None:
+            raise ServerClientUUIDMismatchException()
+
         uuid_server = submission.get("uuid", None)
 
+        # If the server and client don't agree, raise
         if uuid_server != uuid_client:
             logger.warning(
                 "Irrelevant assessment submission: expected '%s', got '%s'",
@@ -207,8 +217,7 @@ def peer_assess(
     if not workflow_data.has_workflow:
         raise ReviewerMustHaveSubmittedException()
 
-    if assessed_submission_uuid:
-        peer_step_data.assert_assessed_submission_uuid_matches(assessed_submission_uuid)
+    peer_step_data.assert_assessing_valid_submission(assessed_submission_uuid)
 
     try:
         assessment = peer_api.create_assessment(
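Because the guard no longer sits behind `if assessed_submission_uuid:`, every call into `peer_assess` is now validated, including calls where the client sent no UUID at all. A hedged caller-side sketch follows, assuming a configured ORA block and an assessment payload like the ones built in the test below; only the imports and the `peer_assess` signature come from this diff, and `submit_peer_assessment` is an illustrative helper, not part of edx-ora2.

from openassessment.xblock.apis.assessments.errors import ServerClientUUIDMismatchException
from openassessment.xblock.apis.assessments.peer_assessment_api import peer_assess


def submit_peer_assessment(xblock, assessment, client_uuid=None):
    """
    Illustrative helper (not part of edx-ora2): submit a peer assessment and
    report whether the server rejected it as targeting the wrong submission.
    """
    try:
        peer_assess(
            assessment['options_selected'],
            assessment['overall_feedback'],
            assessment['criterion_feedback'],
            xblock.config_data,
            xblock.workflow_data,
            xblock.peer_assessment_data(),
            assessed_submission_uuid=client_uuid,  # the check now runs even when this is None
        )
        return True
    except ServerClientUUIDMismatchException:
        # Raised when the reviewer has no active (unexpired) checked-out
        # submission, or when the client and server UUIDs disagree.
        return False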
47 changes: 47 additions & 0 deletions openassessment/xblock/test/test_peer.py
@@ -14,14 +14,18 @@
 import pytz
 
 from openassessment.assessment.api import peer as peer_api
+from openassessment.assessment.models.peer import PeerWorkflowItem
 from openassessment.workflow import api as workflow_api
+from openassessment.xblock.apis.assessments.errors import ServerClientUUIDMismatchException
+from openassessment.xblock.apis.assessments.peer_assessment_api import peer_assess
 from openassessment.xblock.utils.data_conversion import create_submission_dict
 
 from .base import SubmissionTestMixin, XBlockHandlerTestCase, scenario
 
 logger = logging.getLogger(__name__)
 
 
+@ddt.ddt
 class TestPeerAssessment(XBlockHandlerTestCase, SubmissionTestMixin):
     """
     Test integration of the OpenAssessment XBlock with the peer assessment API.
@@ -308,6 +312,49 @@ def test_turbo_grading(self, xblock):
         self.assertNotIn(submission["answer"]["parts"][1]["text"], peer_response.body.decode('utf-8'))
         self.assertIn("You have successfully completed", peer_response.body.decode('utf-8'))
 
+    @scenario('data/peer_assessment_scenario.xml', user_id='Sally')
+    @ddt.data(None, "nonmatchinguuid")
+    def test_assess_expired_peer_item(self, xblock, client_uuid):
+
+        # Create two submissions, one for Sally and one for Hal
+        student_item = xblock.get_student_item_dict()
+        sally_student_item = copy.deepcopy(student_item)
+        sally_student_item['student_id'] = "Sally"
+        sally_submission = self.create_test_submission(
+            xblock, student_item=sally_student_item, submission_text=("Sally's answer 1", "Sally's answer 2")
+        )
+
+        hal_student_item = copy.deepcopy(student_item)
+        hal_student_item['student_id'] = "Hal"
+        hal_submission = self.create_test_submission(
+            xblock, student_item=hal_student_item, submission_text=("Hal's answer 1", "Hal's answer 2")
+        )
+
+        # Sally is given Hal to assess
+        hal_sub = peer_api.get_submission_to_assess(sally_submission['uuid'], 1)
+        assert hal_sub['uuid'] == hal_submission['uuid']
+        assert peer_api.get_active_assessment_submission(sally_submission['uuid']) == hal_sub
+
+        # Modify the peer workflow item to be too old to be "active"
+        pwi = PeerWorkflowItem.objects.last()
+        assert pwi.submission_uuid == hal_submission['uuid']
+        pwi.started_at = pwi.started_at - dt.timedelta(days=1)
+        pwi.save()
+        assert peer_api.get_active_assessment_submission(sally_submission['uuid']) is None
+
+        # An error should be raised when Sally tries to submit an assessment
+        # because she doesn't have an active submission
+        with self.assertRaises(ServerClientUUIDMismatchException):
+            peer_assess(
+                self.ASSESSMENT['options_selected'],
+                self.ASSESSMENT['overall_feedback'],
+                self.ASSESSMENT['criterion_feedback'],
+                xblock.config_data,
+                xblock.workflow_data,
+                xblock.peer_assessment_data(),
+                assessed_submission_uuid=client_uuid
+            )
+
 
 @ddt.ddt
 class TestPeerAssessmentRender(XBlockHandlerTestCase, SubmissionTestMixin):
2 changes: 1 addition & 1 deletion package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
 {
   "name": "edx-ora2",
-  "version": "6.0.24",
+  "version": "6.0.25",
   "repository": "https://github.com/openedx/edx-ora2.git",
   "dependencies": {
     "@edx/frontend-build": "8.0.6",

0 comments on commit f3bab90
