Skip to content
Snippets Groups Projects
Unverified Commit dd3a0bd9 authored by Awais Jibran's avatar Awais Jibran Committed by GitHub
Browse files

Add missing import logs. (#27056)

parent d4bbd9d0
No related branches found
Tags release-2020-03-04-12.11
No related merge requests found
......@@ -122,6 +122,12 @@ def _write_chunk(request, courselike_key):
course_dir = data_root / subdir
filename = request.FILES['course-data'].name
def error_response(message, status):
    """
    Return a JSON error response carrying *message* with the given HTTP *status*.

    The payload shape ({'ErrMsg': ..., 'Stage': -1}) matches what the import
    status front-end expects for a failed upload stage.
    """
    payload = {'ErrMsg': message, 'Stage': -1}
    return JsonResponse(payload, status=status)
courselike_string = str(courselike_key) + filename
# Do everything in a try-except block to make sure everything is properly cleaned up.
try:
......@@ -130,13 +136,7 @@ def _write_chunk(request, courselike_key):
if not filename.endswith('.tar.gz'):
_save_request_status(request, courselike_string, -1)
return JsonResponse(
{
'ErrMsg': _('We only support uploading a .tar.gz file.'),
'Stage': -1
},
status=415
)
return error_response(_('We only support uploading a .tar.gz file.'), 415)
temp_filepath = course_dir / filename
if not course_dir.isdir():
......@@ -158,6 +158,11 @@ def _write_chunk(request, courselike_key):
mode = "wb+"
else:
mode = "ab+"
if not temp_filepath.exists():
_save_request_status(request, courselike_string, -1)
log.error(f'Course Import: {courselike_key} Chunks missed during upload.')
return error_response(_('Some chunks missed during file upload. Please try again'), 409)
size = os.path.getsize(temp_filepath)
# Check to make sure we haven't missed a chunk
# This shouldn't happen, even if different instances are handling
......@@ -167,13 +172,8 @@ def _write_chunk(request, courselike_key):
log.error(
f'Course import {courselike_key}: A chunk has been missed'
)
return JsonResponse(
{
'ErrMsg': _('File upload corrupted. Please try again'),
'Stage': -1
},
status=409
)
return error_response(_('File upload corrupted. Please try again'), 409)
# The last request sometimes comes twice. This happens because
# nginx sends a 499 error code when the response takes too long.
elif size > int(content_range['stop']) and size == int(content_range['end']):
......@@ -213,13 +213,7 @@ def _write_chunk(request, courselike_key):
log.info("Course import %s: Temp data cleared", courselike_key)
log.exception(f'Course import {courselike_key}: error importing course.')
return JsonResponse(
{
'ErrMsg': str(exception),
'Stage': -1
},
status=400
)
return error_response(str(exception), 400)
return JsonResponse({'ImportStatus': 1})
......
......@@ -377,9 +377,7 @@ class XMLModuleStore(ModuleStoreReadBase):
try:
course_descriptor = self.load_course(course_dir, course_ids, errorlog.tracker, target_course_id)
except Exception as exc: # pylint: disable=broad-except
msg = "ERROR: Failed to load courselike '{}': {}".format(
course_dir.encode("utf-8"), str(exc)
)
msg = f'Course import {target_course_id}: ERROR: Failed to load courselike "{course_dir}": {str(exc)}'
set_custom_attribute('course_import_failure', f"Courselike load failure: {msg}")
log.exception(msg)
errorlog.tracker(msg)
......@@ -436,7 +434,7 @@ class XMLModuleStore(ModuleStoreReadBase):
returns a CourseBlock for the course
"""
log.debug('========> Starting courselike import from %s', course_dir)
log.info(f'Course import {target_course_id}: Starting courselike import from {course_dir}')
with open(self.data_dir / course_dir / self.parent_xml) as course_file:
course_data = etree.parse(course_file, parser=edx_xml_parser).getroot()
......@@ -531,7 +529,7 @@ class XMLModuleStore(ModuleStoreReadBase):
self.content_importers(system, course_descriptor, course_dir, url_name)
log.debug('========> Done with courselike import from %s', course_dir)
log.info(f'Course import {target_course_id}: Done with courselike import from {course_dir}')
return course_descriptor
def content_importers(self, system, course_descriptor, course_dir, url_name):
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment