diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py
index caa349125d3884bcaacf08c03a5943516a11c811..76f54bb12cb5c5d9fc07f1008ac0cd42e7254d48 100644
--- a/lms/djangoapps/open_ended_grading/peer_grading_service.py
+++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py
@@ -88,7 +88,7 @@ class PeerGradingService(GradingService):
                 {'location': problem_location, 'grader_id': grader_id})
         return json.dumps(self._render_rubric(response))
 
-    def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key, rubric_scores):
+    def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged):
         data = {'grader_id' : grader_id,
                 'submission_id' : submission_id,
                 'score' : score,
@@ -96,7 +96,8 @@ class PeerGradingService(GradingService):
                 'submission_key': submission_key,
                 'location': location,
                 'rubric_scores': rubric_scores,
-                'rubric_scores_complete': True}
+                'rubric_scores_complete': True,
+                'submission_flagged': submission_flagged}
         return self.post(self.save_grade_url, data)
 
     def is_student_calibrated(self, problem_location, grader_id):
@@ -224,7 +225,7 @@ def save_grade(request, course_id):
         error: if there was an error in the submission, this is the error message
     """
     _check_post(request)
-    required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]'])
+    required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]', 'submission_flagged'])
     success, message = _check_required(request, required)
     if not success:
         return _err_response(message)
@@ -236,9 +237,10 @@ def save_grade(request, course_id):
     feedback = p['feedback']
     submission_key = p['submission_key']
     rubric_scores = p.getlist('rubric_scores[]')
+    submission_flagged = p['submission_flagged']
     try:
         response = peer_grading_service().save_grade(location, grader_id, submission_id, 
-                score, feedback, submission_key, rubric_scores)
+                score, feedback, submission_key, rubric_scores, submission_flagged)
         return HttpResponse(response, mimetype="application/json")
     except GradingServiceError:
         log.exception("""Error saving grade.  server url: {0}, location: {1}, submission_id:{2}, 
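
A minimal sketch (not part of this patch) of how the widened save_grade signature is meant to be called from the LMS side; every literal value below is a placeholder, and the import path assumes the usual lms/djangoapps layout.

    # Illustrative only: placeholder values, nothing here is taken verbatim from the codebase.
    from open_ended_grading.peer_grading_service import peer_grading_service

    service = peer_grading_service()
    response = service.save_grade(
        location="i4x://Org/Course/combinedopenended/SampleQuestion",  # placeholder problem location
        grader_id="42",                      # id of the student doing the grading
        submission_id="17",                  # id of the essay being graded
        score=3,
        feedback="Clear thesis, thin evidence.",
        submission_key="abc123",             # opaque key issued by the grading controller
        rubric_scores=["1", "2"],            # one entry per rubric category
        submission_flagged="false",          # new argument: the checkbox state posted by the browser
    )
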
diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee
index c4b87eb30e376458a7fa77f9881ef890d4d0f16f..ab16b34d12c26bf6e6210f05ee743d5fe9c3f56f 100644
--- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee
+++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee
@@ -175,6 +175,7 @@ class PeerGradingProblem
     @submission_container = $('.submission-container')
     @prompt_container = $('.prompt-container')
     @rubric_container = $('.rubric-container')
+    @flag_student_container = $('.flag-student-container')
     @calibration_panel = $('.calibration-panel')
     @grading_panel = $('.grading-panel')
     @content_panel = $('.content-panel')
@@ -201,6 +202,7 @@ class PeerGradingProblem
     @action_button = $('.action-button')
     @calibration_feedback_button = $('.calibration-feedback-button')
     @interstitial_page_button = $('.interstitial-page-button')
+    @flag_student_checkbox = $('.flag-checkbox')
 
     Collapsible.setCollapsibles(@content_panel)
 
@@ -252,7 +254,8 @@ class PeerGradingProblem
       location: @location
       submission_id: @essay_id_input.val()
       submission_key: @submission_key_input.val()
-      feedback: @feedback_area.val() 
+      feedback: @feedback_area.val()
+      submission_flagged: @flag_student_checkbox.is(':checked')
     return data
 
 
@@ -352,7 +355,7 @@ class PeerGradingProblem
       @grading_panel.find('.calibration-text').show()
       @calibration_panel.find('.grading-text').hide()
       @grading_panel.find('.grading-text').hide()
-
+      @flag_student_container.hide()
 
       @submit_button.unbind('click')
       @submit_button.click @submit_calibration_essay
@@ -379,6 +382,7 @@ class PeerGradingProblem
       @grading_panel.find('.calibration-text').hide()
       @calibration_panel.find('.grading-text').show()
       @grading_panel.find('.grading-text').show()
+      @flag_student_container.show()
 
       @submit_button.unbind('click')
       @submit_button.click @submit_grade
diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html
index cb9ed1c0fbd317af0ab7a45e5bb6d396d5d92ffb..04ee7415ecd9f409d74701a0419ec1e3b623b619 100644
--- a/lms/templates/peer_grading/peer_grading_problem.html
+++ b/lms/templates/peer_grading/peer_grading_problem.html
@@ -72,6 +72,10 @@
           </p>
           <textarea name="feedback" placeholder="Feedback for student (optional)"
                     class="feedback-area" cols="70" ></textarea>
+          <p class="flag-student-container">Flag this submission for review by course staff
+            (use if the submission contains inappropriate content):
+            <input type="checkbox" class="flag-checkbox" value="student_is_flagged">
+          </p>
         </div>
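
A rough end-to-end sketch (also not part of this patch) of the form post the browser produces once the flag checkbox value is added to the grade payload: the boolean from .is(':checked') arrives server-side as the string "true" or "false", and the save_grade view above now requires submission_flagged to be present. The URL and values are illustrative; the real route comes from the peer grading url patterns for the course.

    # Illustrative only: hypothetical URL and placeholder values.
    from django.test.client import Client

    client = Client()
    # A real test would first create, log in, and enroll a user in the course.
    response = client.post(
        "/courses/Org/Course/Run/peer_grading/save_grade",   # hypothetical route
        {
            "location": "i4x://Org/Course/combinedopenended/SampleQuestion",
            "submission_id": "17",
            "submission_key": "abc123",
            "score": "2",
            "feedback": "Clear thesis, thin evidence.",
            "rubric_scores[]": ["1", "2"],       # getlist('rubric_scores[]') receives both values
            "submission_flagged": "true",        # new required field, mirrors the checkbox state
        },
    )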