Hsin-Yu Chien / edx-platform-release · Commit f4a3c544
Authored 12 years ago by ichuang

fix xmodule/capa tests to use new CorrectMap
Parent: 2af525f1
Changes: 2 files, with 19 additions and 19 deletions

  common/lib/capa/capa_problem.py   +2  −2
  common/lib/xmodule/tests.py       +17 −17
common/lib/capa/capa_problem.py  (+2 −2)

@@ -200,12 +200,12 @@ class LoncapaProblem(object):
         self.student_answers = answers

         oldcmap = self.correct_map              # old CorrectMap
         newcmap = CorrectMap()                  # start new with empty CorrectMap
-        log.debug('Responders: %s' % self.responders)
+        # log.debug('Responders: %s' % self.responders)
         for responder in self.responders.values():
             results = responder.evaluate_answers(answers, oldcmap)     # call the responsetype instance to do the actual grading
             newcmap.update(results)
         self.correct_map = newcmap
-        log.debug('%s: in grade_answers, answers=%s, cmap=%s' % (self, answers, newcmap))
+        # log.debug('%s: in grade_answers, answers=%s, cmap=%s' % (self,answers,newcmap))
         return newcmap

     def get_question_answers(self):
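For reference, a minimal sketch of the CorrectMap interface that grade_answers() and the rewritten tests rely on. This is an illustration only, assuming a dict-like store keyed by answer id; it is not the actual class in common/lib/capa and may differ in detail:

# Hypothetical sketch of the CorrectMap interface assumed by this commit;
# the real implementation lives in common/lib/capa.
class CorrectMap(object):
    def __init__(self):
        # maps answer_id -> {'correctness': 'correct' or 'incorrect', ...}
        self.cmap = {}

    def set(self, answer_id, correctness, **kwargs):
        # record the grading result for one answer id
        self.cmap[answer_id] = dict(correctness=correctness, **kwargs)

    def update(self, other_cmap):
        # merge the results returned by a responder's evaluate_answers()
        self.cmap.update(other_cmap.cmap)

    def get_correctness(self, answer_id):
        # what the updated tests call instead of indexing the map directly
        return self.cmap[answer_id]['correctness']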
common/lib/xmodule/tests.py  (+17 −17)

 #
-# unittests for courseware
+# unittests for xmodule (and capa)
 #
 # Note: run this using a like like this:
 #
-# django-admin.py test --settings=envs.test_ike --pythonpath=. courseware
+# django-admin.py test --settings=lms.envs.test_ike --pythonpath=. common/lib/xmodule

 import unittest
 import os
@@ -96,31 +96,31 @@ class MultiChoiceTest(unittest.TestCase):
         multichoice_file = os.path.dirname(__file__) + "/test_files/multichoice.xml"
         test_lcp = lcp.LoncapaProblem(open(multichoice_file), '1', system=i4xs)
         correct_answers = {'1_2_1': 'choice_foil3'}
-        self.assertEquals(test_lcp.grade_answers(correct_answers)['1_2_1'], 'correct')
+        self.assertEquals(test_lcp.grade_answers(correct_answers).get_correctness('1_2_1'), 'correct')
         false_answers = {'1_2_1': 'choice_foil2'}
-        self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(false_answers).get_correctness('1_2_1'), 'incorrect')

     def test_MC_bare_grades(self):
         multichoice_file = os.path.dirname(__file__) + "/test_files/multi_bare.xml"
         test_lcp = lcp.LoncapaProblem(open(multichoice_file), '1', system=i4xs)
         correct_answers = {'1_2_1': 'choice_2'}
-        self.assertEquals(test_lcp.grade_answers(correct_answers)['1_2_1'], 'correct')
+        self.assertEquals(test_lcp.grade_answers(correct_answers).get_correctness('1_2_1'), 'correct')
         false_answers = {'1_2_1': 'choice_1'}
-        self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(false_answers).get_correctness('1_2_1'), 'incorrect')

     def test_TF_grade(self):
         truefalse_file = os.path.dirname(__file__) + "/test_files/truefalse.xml"
         test_lcp = lcp.LoncapaProblem(open(truefalse_file), '1', system=i4xs)
         correct_answers = {'1_2_1': ['choice_foil2', 'choice_foil1']}
-        self.assertEquals(test_lcp.grade_answers(correct_answers)['1_2_1'], 'correct')
+        self.assertEquals(test_lcp.grade_answers(correct_answers).get_correctness('1_2_1'), 'correct')
         false_answers = {'1_2_1': ['choice_foil1']}
-        self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(false_answers).get_correctness('1_2_1'), 'incorrect')
         false_answers = {'1_2_1': ['choice_foil1', 'choice_foil3']}
-        self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(false_answers).get_correctness('1_2_1'), 'incorrect')
         false_answers = {'1_2_1': ['choice_foil3']}
-        self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(false_answers).get_correctness('1_2_1'), 'incorrect')
         false_answers = {'1_2_1': ['choice_foil1', 'choice_foil2', 'choice_foil3']}
-        self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(false_answers).get_correctness('1_2_1'), 'incorrect')

 class ImageResponseTest(unittest.TestCase):
     def test_ir_grade(self):
@@ -131,8 +131,8 @@ class ImageResponseTest(unittest.TestCase):
         test_answers = {'1_2_1': '[500,20]',
                         '1_2_2': '[250,300]',
                         }
-        self.assertEquals(test_lcp.grade_answers(test_answers)['1_2_1'], 'correct')
-        self.assertEquals(test_lcp.grade_answers(test_answers)['1_2_2'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(test_answers).get_correctness('1_2_1'), 'correct')
+        self.assertEquals(test_lcp.grade_answers(test_answers).get_correctness('1_2_2'), 'incorrect')

 class SymbolicResponseTest(unittest.TestCase):
     def test_sr_grade(self):
@@ -220,8 +220,8 @@ class SymbolicResponseTest(unittest.TestCase):
             </mstyle>
           </math>
 ''',
                    }
-        self.assertEquals(test_lcp.grade_answers(correct_answers)['1_2_1'], 'correct')
-        self.assertEquals(test_lcp.grade_answers(wrong_answers)['1_2_1'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(correct_answers).get_correctness('1_2_1'), 'correct')
+        self.assertEquals(test_lcp.grade_answers(wrong_answers).get_correctness('1_2_1'), 'incorrect')

 class OptionResponseTest(unittest.TestCase):
     '''
@@ -237,8 +237,8 @@ class OptionResponseTest(unittest.TestCase):
         test_answers = {'1_2_1': 'True',
                         '1_2_2': 'True',
                         }
-        self.assertEquals(test_lcp.grade_answers(test_answers)['1_2_1'], 'correct')
-        self.assertEquals(test_lcp.grade_answers(test_answers)['1_2_2'], 'incorrect')
+        self.assertEquals(test_lcp.grade_answers(test_answers).get_correctness('1_2_1'), 'correct')
+        self.assertEquals(test_lcp.grade_answers(test_answers).get_correctness('1_2_2'), 'incorrect')

 #-----------------------------------------------------------------------------
 # Grading tests
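The assertion pattern above repeats for every responsetype; a hypothetical helper (not part of this commit) could wrap the new grade_answers(...).get_correctness(...) call in one place:

# Hypothetical helper, not part of this commit: wraps the repeated
# grade_answers(...).get_correctness(...) assertion used in the tests above.
def assert_graded(testcase, problem, answers, answer_id, expected):
    correctmap = problem.grade_answers(answers)
    testcase.assertEquals(correctmap.get_correctness(answer_id), expected)

# Illustrative usage inside one of the test methods above:
#     assert_graded(self, test_lcp, {'1_2_1': 'choice_foil3'}, '1_2_1', 'correct')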