diff --git a/openassessment/runtime_imports/classes.py b/openassessment/runtime_imports/classes.py
index 732278e1da..89cdd4b4d3 100644
--- a/openassessment/runtime_imports/classes.py
+++ b/openassessment/runtime_imports/classes.py
@@ -36,3 +36,11 @@ def import_waffle_flag():
     """
     from edx_toggles.toggles import WaffleFlag
     return WaffleFlag
+
+
+def import_student_module():
+    """
+    Helper method that imports StudentModule from edx-platform at runtime.
+    """
+    from lms.djangoapps.courseware.models import StudentModule
+    return StudentModule
diff --git a/openassessment/templates/legacy/edit/oa_edit_basic_settings_list.html b/openassessment/templates/legacy/edit/oa_edit_basic_settings_list.html
index 25cf0d324d..b2b5647a9a 100644
--- a/openassessment/templates/legacy/edit/oa_edit_basic_settings_list.html
+++ b/openassessment/templates/legacy/edit/oa_edit_basic_settings_list.html
@@ -171,7 +171,7 @@
 
- {% trans "Specify whether learners can reset and resubmit their work within the alocated period as long as it has not yet been graded." %}
+ {% trans "Specify whether learners can reset and resubmit their work within an allocated period as long as it has not yet been graded." %}
 
diff --git a/openassessment/templates/legacy/response/oa_response_submitted.html b/openassessment/templates/legacy/response/oa_response_submitted.html
index 807fe73125..b31edd2fc8 100644
--- a/openassessment/templates/legacy/response/oa_response_submitted.html
+++ b/openassessment/templates/legacy/response/oa_response_submitted.html
@@ -66,7 +66,7 @@
 {% trans "We could not reset your response" %}
diff --git a/openassessment/xblock/openassessmentblock.py b/openassessment/xblock/openassessmentblock.py
index 367eb13432..788886ab6b 100644
--- a/openassessment/xblock/openassessmentblock.py
+++ b/openassessment/xblock/openassessmentblock.py
@@ -11,6 +11,7 @@
 import pytz
 
 from django.conf import settings
+from django.contrib.auth import get_user_model
 from django.template.loader import get_template
 
 from bleach.sanitizer import Cleaner
@@ -21,6 +22,7 @@
 from xblock.fields import Boolean, Dict, Integer, List, Scope, String
 
 from openassessment.runtime_imports.functions import reset_student_attempts, get_user_by_username_or_email
+from openassessment.runtime_imports.classes import import_student_module
 from openassessment.staffgrader.staff_grader_mixin import StaffGraderMixin
 from openassessment.workflow.errors import AssessmentWorkflowError
 from openassessment.xblock.apis.grades_api import GradesAPI
@@ -193,13 +195,13 @@ class OpenAssessmentBlock(
     resubmissions_grace_period_hours = Integer(
         default=0,
         scope=Scope.content,
-        help="The number of hours after the submission due date that resubmissions are allowed."
+        help="The number of hours after the student's submission date that resubmissions are allowed."
     )
 
     resubmissions_grace_period_minutes = Integer(
         default=0,
         scope=Scope.content,
-        help="The number of minutes after the submission due date that resubmissions are allowed."
+        help="The number of minutes after the student's submission date that resubmissions are allowed."
     )
 
     rubric_criteria = List(
@@ -1367,13 +1369,22 @@ def reset_submission(self, data, suffix=""):  # pylint: disable=unused-argument
         Returns:
             dict: A dictionary indication the status with keys 'success' (bool) and 'msg' (str)
         """
+        StudentModule = import_student_module()  # pylint: disable=invalid-name
+        block_user = self.runtime.service(self, "user").get_current_user()
+        username = block_user.opt_attrs.get("edx-platform.username")
+
+        try:
+            user = get_user_by_username_or_email(username)
+        except get_user_model().DoesNotExist as error:
+            logger.exception(f"An error occurred while resetting the submission: {error}")
+            return {"success": False, "msg": self._("The user does not exist.")}
+
         try:
-            block_user = self.runtime.service(self, "user").get_current_user()
-            user = get_user_by_username_or_email(block_user.opt_attrs.get("edx-platform.username"))
             reset_student_attempts(self.course_id, user, self.location, user, True)  # pylint: disable=no-member
-        except Exception as error:
+        except StudentModule.DoesNotExist as error:
             logger.exception(f"An error occurred while resetting the submission: {error}")
-            return {"success": False, "msg": self._("Error resetting submission.")}
+            return {"success": False, "msg": self._("There is no submission to reset.")}
+
         return {"success": True, "msg": self._("Submission reset successfully.")}
 
     @XBlock.json_handler
diff --git a/openassessment/xblock/test/data/update_xblock.json b/openassessment/xblock/test/data/update_xblock.json
index 0dcfbf3c5e..8508876083 100644
--- a/openassessment/xblock/test/data/update_xblock.json
+++ b/openassessment/xblock/test/data/update_xblock.json
@@ -105,8 +105,8 @@
         "show_rubric_during_response": false,
         "allow_latex": false,
         "allow_learner_resubmissions": true,
-        "resubmissions_grace_period_hours": 0,
-        "resubmissions_grace_period_minutes": 0,
+        "resubmissions_grace_period_hours": 1,
+        "resubmissions_grace_period_minutes": 30,
         "leaderboard_show": 0,
         "assessments": [
             {
@@ -166,8 +166,8 @@
         "show_rubric_during_response": false,
         "allow_latex": false,
"allow_learner_resubmissions": true, - "resubmissions_grace_period_hours": 0, - "resubmissions_grace_period_minutes": 0, + "resubmissions_grace_period_hours": 2, + "resubmissions_grace_period_minutes": 59, "leaderboard_show": 0, "assessments": [ { @@ -227,7 +227,7 @@ "show_rubric_during_response": false, "allow_latex": false, "allow_learner_resubmissions": true, - "resubmissions_grace_period_hours": 0, + "resubmissions_grace_period_hours": 3, "resubmissions_grace_period_minutes": 0, "leaderboard_show": 0, "assessments": [ @@ -300,7 +300,7 @@ "allow_latex": false, "allow_learner_resubmissions": true, "resubmissions_grace_period_hours": 0, - "resubmissions_grace_period_minutes": 0, + "resubmissions_grace_period_minutes": 40, "leaderboard_show": 0, "title": "My new title.", "assessments": [ @@ -361,7 +361,7 @@ "allow_latex": false, "allow_learner_resubmissions": true, "resubmissions_grace_period_hours": 0, - "resubmissions_grace_period_minutes": 0, + "resubmissions_grace_period_minutes": 1, "leaderboard_show": 0, "title": "My new title.", "assessments": [ @@ -421,7 +421,7 @@ "show_rubric_during_response": false, "allow_latex": false, "allow_learner_resubmissions": true, - "resubmissions_grace_period_hours": 0, + "resubmissions_grace_period_hours": 1, "resubmissions_grace_period_minutes": 0, "leaderboard_show": 0, "title": "My new title.", @@ -482,8 +482,8 @@ "show_rubric_during_response": false, "allow_latex": false, "allow_learner_resubmissions": true, - "resubmissions_grace_period_hours": 0, - "resubmissions_grace_period_minutes": 0, + "resubmissions_grace_period_hours": 5, + "resubmissions_grace_period_minutes": 5, "leaderboard_show": 0, "title": "My new title.", "assessments": [ diff --git a/openassessment/xblock/utils/allow_resubmission.py b/openassessment/xblock/utils/allow_resubmission.py index a9d63ab265..1be85b2e7a 100644 --- a/openassessment/xblock/utils/allow_resubmission.py +++ b/openassessment/xblock/utils/allow_resubmission.py @@ -56,20 +56,19 @@ def submission_date_exceeded(config_data, submission_data: dict) -> bool: Returns: bool: True if the submission date has been exceeded, False otherwise. """ - ora_due_date = datetime.strptime(config_data.submission_due, "%Y-%m-%dT%H:%M:%S%z") - current_datetime = datetime.now(pytz.UTC) - ora_due_date = ora_due_date.replace(tzinfo=pytz.UTC) - if current_datetime >= ora_due_date: + is_closed, reason, _, _ = config_data.is_closed(step="submission") + if is_closed and reason == "due": return True hours = config_data.resubmissions_grace_period_hours minutes = config_data.resubmissions_grace_period_minutes - if hours == 0 and minutes == 0: + if not hours and not minutes: return False - delta_time = timedelta(hours=int(hours), minutes=int(minutes)) - grace_period = submission_data["created_at"] + delta_time - return current_datetime >= grace_period + current_datetime = datetime.now(pytz.UTC) + grace_period = timedelta(hours=int(hours), minutes=int(minutes)) + deadline_datetime = submission_data["created_at"] + grace_period + return current_datetime >= deadline_datetime def has_been_graded(workflow_data) -> bool: