diff --git a/lms/services/lti_grading/_v13.py b/lms/services/lti_grading/_v13.py
index 3198320ae9..a6cd2ab203 100644
--- a/lms/services/lti_grading/_v13.py
+++ b/lms/services/lti_grading/_v13.py
@@ -2,7 +2,7 @@
 from datetime import datetime, timezone
 from urllib.parse import urlparse
 
-from lms.models import LTIRegistration
+from lms.models import ApplicationInstance, LTIRegistration
 from lms.product.family import Family
 from lms.product.plugin.misc import MiscPlugin
 from lms.services.exceptions import ExternalRequestError, StudentNotInCourse
@@ -94,7 +94,7 @@ def get_score_maximum(self, resource_link_id) -> float | None:
 
     def sync_grade(  # noqa: PLR0913
         self,
-        lti_registration: LTIRegistration,
+        application_instance: ApplicationInstance,
         lis_outcome_service_url: str,
         grade_timestamp: str,
        user_grading_id: str,
@@ -108,7 +108,7 @@ def sync_grade(  # noqa: PLR0913
         """
         payload = self._record_score_payload(score, user_grading_id, grade_timestamp)
 
-        if lti_registration.product_family == Family.CANVAS:
+        if application_instance.lti_registration.product_family == Family.CANVAS:
             # By default Canvas calls to /score create a new submission
             # Disable that behaviour and just submit the new grade.
             # See: https://canvas.instructure.com/doc/api/score.html
@@ -116,7 +116,7 @@ def sync_grade(  # noqa: PLR0913
                 "new_submission": False
             }
         return self._ltia_service.request(
-            lti_registration,
+            application_instance.lti_registration,
             "POST",
             self._service_url(lis_outcome_service_url, "/scores"),
             scopes=self.LTIA_SCOPES,
diff --git a/lms/tasks/grading.py b/lms/tasks/grading.py
index 67617ff9c8..fb15dd7832 100644
--- a/lms/tasks/grading.py
+++ b/lms/tasks/grading.py
@@ -1,12 +1,6 @@
 from sqlalchemy import exists, select
 
-from lms.models import (
-    ApplicationInstance,
-    GradingSync,
-    GradingSyncGrade,
-    Grouping,
-    LTIRegistration,
-)
+from lms.models import GradingSync, GradingSyncGrade
 from lms.services import LTIAHTTPService
 from lms.services.lti_grading.factory import LTI13GradingService
 from lms.tasks.celery import app
@@ -30,18 +24,8 @@ def sync_grades():
                 assert (
                     assignment.lis_outcome_service_url
                 ), "Assignment without grading URL"
-                lti_registration = request.db.scalars(
-                    select(LTIRegistration)
-                    .join(ApplicationInstance)
-                    .join(Grouping)
-                    .where(Grouping.id == assignment.course_id)
-                    .order_by(LTIRegistration.updated.desc())
-                ).first()
-                assert lti_registration, "No LTI registraion for LTI1.3 assignment"
-
                 for grade in sync.grades:
                     sync_grade.delay(
-                        lti_registration_id=lti_registration.id,
                         lis_outcome_service_url=assignment.lis_outcome_service_url,
                         grading_sync_grade_id=grade.id,
                     )
@@ -54,14 +38,13 @@ def sync_grades():
     retry_backoff=10,
     autoretry_for=(Exception,),
 )
-def sync_grade(
-    *, lti_registration_id, lis_outcome_service_url: str, grading_sync_grade_id: int
-):
+def sync_grade(*, lis_outcome_service_url: str, grading_sync_grade_id: int):
     """Send one particular grade to the LMS."""
     with app.request_context() as request:
         with request.tm:
             grading_sync_grade = request.db.get(GradingSyncGrade, grading_sync_grade_id)
             grading_sync = grading_sync_grade.grading_sync
+            application_instance = grading_sync.assignment.course.application_instance
 
             grading_service = LTI13GradingService(
                 ltia_service=request.find_service(LTIAHTTPService),
@@ -73,7 +56,7 @@ def sync_grade(
             )
             try:
                 grading_service.sync_grade(
-                    request.db.get(LTIRegistration, lti_registration_id),
+                    application_instance,
                     lis_outcome_service_url,
                     grading_sync.created.isoformat(),
                     grading_sync_grade.lms_user.lti_v13_user_id,
diff --git a/tests/unit/lms/services/lti_grading/_v13_test.py b/tests/unit/lms/services/lti_grading/_v13_test.py
index 7a57c95636..ab1be0ae18 100644
--- a/tests/unit/lms/services/lti_grading/_v13_test.py
+++ b/tests/unit/lms/services/lti_grading/_v13_test.py
@@ -150,12 +150,16 @@ def test_get_score_maximum_no_line_item(self, svc, ltia_http_service):
         assert not svc.get_score_maximum(sentinel.resource_link_id)
 
     @pytest.mark.parametrize("is_canvas", [True, False])
-    def test_sync_grade(self, svc, ltia_http_service, lti_registration, is_canvas):
+    def test_sync_grade(
+        self, svc, ltia_http_service, lti_v13_application_instance, is_canvas
+    ):
         if is_canvas:
-            lti_registration.issuer = "https://canvas.instructure.com"
+            lti_v13_application_instance.lti_registration.issuer = (
+                "https://canvas.instructure.com"
+            )
 
         response = svc.sync_grade(
-            lti_registration,
+            lti_v13_application_instance,
             "LIS_OUTCOME_SERVICE_URL",
             datetime(2022, 4, 4).isoformat(),
             sentinel.user_id,
@@ -176,7 +180,7 @@ def test_sync_grade(self, svc, ltia_http_service, lti_registration, is_canvas):
         }
 
         ltia_http_service.request.assert_called_once_with(
-            lti_registration,
+            lti_v13_application_instance.lti_registration,
             "POST",
             "LIS_OUTCOME_SERVICE_URL/scores",
             scopes=svc.LTIA_SCOPES,
diff --git a/tests/unit/lms/tasks/grading_test.py b/tests/unit/lms/tasks/grading_test.py
index 7a76eb85f1..2c2ecae823 100644
--- a/tests/unit/lms/tasks/grading_test.py
+++ b/tests/unit/lms/tasks/grading_test.py
@@ -8,16 +8,12 @@
 
 
 class TestGradingTasks:
-    def test_sync_grades(self, lti_v13_application_instance, sync_grade, grading_sync):
+    def test_sync_grades(self, sync_grade, grading_sync):
         sync_grades()
 
         sync_grade.delay.assert_has_calls(
             [
-                call(
-                    lti_registration_id=lti_v13_application_instance.lti_registration_id,
-                    lis_outcome_service_url="URL",
-                    grading_sync_grade_id=grade.id,
-                )
+                call(lis_outcome_service_url="URL", grading_sync_grade_id=grade.id)
                 for grade in grading_sync.grades
             ],
             any_order=True,
@@ -33,7 +29,6 @@ def test_sync_grade(
         sync_grades_complete,
     ):
         sync_grade(
-            lti_registration_id=lti_v13_application_instance.lti_registration_id,
             lis_outcome_service_url="URL",
             grading_sync_grade_id=grading_sync.grades[0].id,
         )
@@ -49,7 +44,7 @@ def test_sync_grade(
 
         grading_service = LTI13GradingService.return_value
         grading_service.sync_grade.assert_called_once_with(
-            lti_v13_application_instance.lti_registration,
+            lti_v13_application_instance,
             "URL",
             grading_sync.created.isoformat(),
             grading_sync.grades[0].lms_user.lti_v13_user_id,
@@ -62,11 +57,7 @@ def test_sync_grade(
 
     @pytest.mark.usefixtures("ltia_http_service")
     def test_sync_grade_raises(
-        self,
-        grading_sync,
-        lti_v13_application_instance,
-        LTI13GradingService,
-        sync_grades_complete,
+        self, grading_sync, LTI13GradingService, sync_grades_complete
     ):
         grading_service = LTI13GradingService.return_value
         grading_service.sync_grade.side_effect = Exception
@@ -74,7 +65,6 @@ def test_sync_grade_raises(
 
         with pytest.raises(Exception):
             sync_grade(
-                lti_registration_id=lti_v13_application_instance.lti_registration_id,
                 lis_outcome_service_url="URL",
                 grading_sync_grade_id=grading_sync.grades[0].id,
             )
@@ -84,18 +74,13 @@ def test_sync_grade_raises(
 
     @pytest.mark.usefixtures("ltia_http_service")
     def test_sync_grade_last_retry(
-        self,
-        grading_sync,
-        lti_v13_application_instance,
-        LTI13GradingService,
-        sync_grades_complete,
+        self, grading_sync, LTI13GradingService, sync_grades_complete
     ):
         grading_service = LTI13GradingService.return_value
         grading_service.sync_grade.side_effect = Exception
         sync_grade.max_retries = 0
 
         sync_grade(
-            lti_registration_id=lti_v13_application_instance.lti_registration_id,
             lis_outcome_service_url="URL",
             grading_sync_grade_id=grading_sync.grades[0].id,
         )