Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
E
edx-platform-release
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Deploy
Releases
Package Registry
Model registry
Operate
Terraform modules
Monitor
Incidents
Service Desk
Analyze
Value stream analytics
Contributor analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
Hsin-Yu Chien
edx-platform-release
Commits
84235529
Commit
84235529
authored
11 years ago
by
David Ormsbee
Browse files
Options
Downloads
Plain Diff
Merge pull request #1816 from edx/ormsbee/grading_tests
Add basic tests of gradeset iteration.
parents
c62abedd
f10df353
Loading
Loading
No related merge requests found
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
lms/djangoapps/courseware/tests/test_grades.py
+122
-0
122 additions, 0 deletions
lms/djangoapps/courseware/tests/test_grades.py
with
122 additions
and
0 deletions
lms/djangoapps/courseware/tests/test_grades.py
0 → 100644
+
122
−
0
View file @
84235529
"""
Test grade calculation.
"""
from
django.http
import
Http404
from
django.test.utils
import
override_settings
from
mock
import
patch
from
courseware.tests.modulestore_config
import
TEST_DATA_MIXED_MODULESTORE
from
student.tests.factories
import
UserFactory
from
xmodule.modulestore.tests.factories
import
CourseFactory
from
xmodule.modulestore.tests.django_utils
import
ModuleStoreTestCase
from
courseware.grades
import
grade
,
iterate_grades_for
def _grade_with_errors(student, request, course, keep_raw_scores=False):
    """
    Fake grading function that fails for student3 and student4 but defers to
    the real `grade()` for every other student.

    It's meant to simulate something going really wrong while grading one
    particular student, so tests can verify that a single failure won't kill
    an entire course grading run.
    """
    # Blow up for exactly these two usernames; everyone else grades normally.
    if student.username in ('student3', 'student4'):
        raise Exception("I don't like {}".format(student.username))

    return grade(student, request, course, keep_raw_scores=keep_raw_scores)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestGradeIteration(ModuleStoreTestCase):
    """
    Test iteration through student gradesets.
    """
    # Course identifiers used when building the test course in setUp().
    COURSE_NUM = "1000"
    COURSE_NAME = "grading_test_course"

    def setUp(self):
        """
        Create a course and a handful of users to assign grades
        """
        self.course = CourseFactory.create(
            display_name=self.COURSE_NAME,
            number=self.COURSE_NUM,
        )
        self.students = [
            UserFactory.create(username='student1'),
            UserFactory.create(username='student2'),
            UserFactory.create(username='student3'),
            UserFactory.create(username='student4'),
            UserFactory.create(username='student5'),
        ]

    def test_empty_student_list(self):
        """
        If we don't pass in any students, it should return a zero-length
        iterator, but it shouldn't error.
        """
        gradeset_results = list(iterate_grades_for(self.course.id, []))
        self.assertEqual(gradeset_results, [])

    def test_nonexistent_course(self):
        """
        If the course we want to get grades for does not exist, a `Http404`
        should be raised. This is a horrible crossing of abstraction boundaries
        and should be fixed, but for now we're just testing the behavior. :-(
        """
        with self.assertRaises(Http404):
            gradeset_results = iterate_grades_for("I/dont/exist", [])
            # `iterate_grades_for` is lazy, so the course lookup (and the
            # Http404) only happens once we start consuming the iterator.
            # Use the builtin next() rather than the Python 2-only
            # `.next()` method so this also works under Python 3.
            next(gradeset_results)

    def test_all_empty_grades(self):
        """
        No students have grade entries
        """
        all_gradesets, all_errors = self._gradesets_and_errors_for(
            self.course.id, self.students
        )
        self.assertEqual(len(all_errors), 0)
        # With no graded content, every student gets a null grade and 0%.
        for gradeset in all_gradesets.values():
            self.assertIsNone(gradeset['grade'])
            self.assertEqual(gradeset['percent'], 0.0)

    @patch('courseware.grades.grade', _grade_with_errors)
    def test_grading_exception(self):
        """
        Test that we correctly capture exception messages that bubble up from
        grading. Note that we only see errors at this level if the grading
        process for this student fails entirely due to an unexpected event --
        having errors in the problem sets will not trigger this.

        We patch the grade() method with our own, which will generate the errors
        for student3 and student4.
        """
        all_gradesets, all_errors = self._gradesets_and_errors_for(
            self.course.id, self.students
        )
        student1, student2, student3, student4, student5 = self.students
        self.assertEqual(
            all_errors,
            {
                student3: "I don't like student3",
                student4: "I don't like student4",
            }
        )

        # But we should still have five gradesets
        self.assertEqual(len(all_gradesets), 5)

        # Even though two will simply be empty
        self.assertFalse(all_gradesets[student3])
        self.assertFalse(all_gradesets[student4])

        # The rest will have grade information in them
        self.assertTrue(all_gradesets[student1])
        self.assertTrue(all_gradesets[student2])
        self.assertTrue(all_gradesets[student5])

    ################################# Helpers #################################
    def _gradesets_and_errors_for(self, course_id, students):
        """
        Simple helper method to iterate through student grades and give us
        two dictionaries -- one that has all students and their respective
        gradesets, and one that has only students that could not be graded and
        their respective error messages.
        """
        students_to_gradesets = {}
        students_to_errors = {}

        for student, gradeset, err_msg in iterate_grades_for(course_id, students):
            students_to_gradesets[student] = gradeset
            if err_msg:
                students_to_errors[student] = err_msg

        return students_to_gradesets, students_to_errors
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment