Unverified commit 01b6b19f, authored by Feanil Patel, committed by GitHub

Merge pull request #22454 from edx/feanil/handle_pickling_failure

Feanil/handle pickling failure
parents fa24ed8e f485ff45
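
The change hardens three cache-read paths against corrupt or unreadable pickled data: instead of letting a pickling error propagate, each site now treats the error as a cache miss, evicts the bad entry, and falls back to the source of truth. A minimal sketch of the shared pattern, assuming a Django-style cache API (get/set/delete) and a rebuild callable; the names here are illustrative, not the PR's literal code:

import logging
import pickle
import zlib

log = logging.getLogger(__name__)

def read_through(cache, key, rebuild):
    """Treat any unpickling error as a cache miss: evict and rebuild."""
    compressed = cache.get(key)
    if compressed is not None:
        try:
            return pickle.loads(zlib.decompress(compressed))
        except Exception:
            # Corrupt entry: log it, drop it, and fall through to rebuild.
            log.warning("Corrupt cache entry for %s; evicting.", key)
            cache.delete(key)
    value = rebuild()
    cache.set(key, zlib.compress(pickle.dumps(value)))
    return value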
@@ -226,24 +226,30 @@ class CourseStructureCache(object):
             return None
 
         with TIMER.timer("CourseStructureCache.get", course_context) as tagger:
-            compressed_pickled_data = self.cache.get(key)
-            tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
-
-            if compressed_pickled_data is None:
-                # Always log cache misses, because they are unexpected
-                tagger.sample_rate = 1
-                return None
-
-            tagger.measure('compressed_size', len(compressed_pickled_data))
-
-            pickled_data = zlib.decompress(compressed_pickled_data)
-            tagger.measure('uncompressed_size', len(pickled_data))
-
-            if six.PY2:
-                return pickle.loads(pickled_data)
-            else:
-                return pickle.loads(pickled_data, encoding='latin-1')
+            try:
+                compressed_pickled_data = self.cache.get(key)
+                tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
+
+                if compressed_pickled_data is None:
+                    # Always log cache misses, because they are unexpected
+                    tagger.sample_rate = 1
+                    return None
+
+                tagger.measure('compressed_size', len(compressed_pickled_data))
+
+                pickled_data = zlib.decompress(compressed_pickled_data)
+                tagger.measure('uncompressed_size', len(pickled_data))
+
+                if six.PY2:
+                    return pickle.loads(pickled_data)
+                else:
+                    return pickle.loads(pickled_data, encoding='latin-1')
+            except Exception:
+                # The cached data is corrupt in some way, get rid of it.
+                log.warning("CourseStructureCache: Bad data in cache for %s", course_context)
+                self.cache.delete(key)
+                return None
 
     def set(self, key, structure, course_context=None):
         """Given a structure, will pickle, compress, and write to cache."""
         if self.cache is None:
...
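
One detail worth calling out in the hunk above: the Python 3 branch passes encoding='latin-1' because the cache may hold pickles written by Python 2, whose str payloads are raw bytes, and latin-1 is the one codec that maps all 256 byte values. A minimal illustration; the byte string below is what Python 2's pickle.dumps produces for the non-ASCII str 'caf\xe9' at protocol 0:

import pickle

py2_pickle = b"S'caf\\xe9'\np0\n."

# The default encoding='ASCII' raises UnicodeDecodeError on the \xe9 byte;
# latin-1 decodes every byte value, so the load succeeds.
assert pickle.loads(py2_pickle, encoding='latin-1') == 'caf\xe9'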
@@ -977,6 +977,15 @@ class TestCourseStructureCache(SplitModuleTest):
         # now make sure that you get the same structure
         self.assertEqual(cached_structure, not_cached_structure)
 
+        # If data is corrupted, get it from mongo again.
+        cache_key = self.new_course.id.version_guid
+        self.cache.set(cache_key, b"bad_data")
+        with check_mongo_calls(1):
+            not_corrupt_structure = self._get_structure(self.new_course)
+
+        # now make sure that you get the same structure
+        self.assertEqual(not_corrupt_structure, not_cached_structure)
+
     @patch('xmodule.modulestore.split_mongo.mongo_connection.get_cache')
     def test_course_structure_cache_no_cache_configured(self, mock_get_cache):
         mock_get_cache.side_effect = InvalidCacheBackendError
...
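
The remaining two hunks guard zunpickle, a shared helper whose source is not shown in this diff. A plausible sketch of the pair, assuming it mirrors the zlib-plus-pickle code in CourseStructureCache above (the bodies below are an assumption, not the verified helper):

import pickle
import zlib

import six

def zpickle(data):
    """Pickle then zlib-compress (assumed inverse of zunpickle)."""
    return zlib.compress(pickle.dumps(data, pickle.HIGHEST_PROTOCOL))

def zunpickle(zdata):
    """Decompress then unpickle; raises if the payload is truncated or corrupt."""
    if six.PY2:
        return pickle.loads(zlib.decompress(zdata))
    return pickle.loads(zlib.decompress(zdata), encoding='latin-1')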
@@ -207,7 +207,15 @@ class BlockStructureStore(object):
         """
         Deserializes the given data and returns the parsed block_structure.
         """
-        block_relations, transformer_data, block_data_map = zunpickle(serialized_data)
+        try:
+            block_relations, transformer_data, block_data_map = zunpickle(serialized_data)
+        except Exception:
+            # Somehow failed to de-serialize the data, assume it's corrupt.
+            bs_model = self._get_model(root_block_usage_key)
+            logger.exception(u"BlockStructure: Failed to load data from cache for %s", bs_model)
+            raise BlockStructureNotFound(bs_model.data_usage_key)
+
         return BlockStructureFactory.create_new(
             root_block_usage_key,
             block_relations,
...
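
Raising BlockStructureNotFound here, rather than letting the pickle error escape, routes corrupt data through the recovery path callers already have for an ordinary miss. A hypothetical caller, sketched under that assumption; get_collected_sketch and rebuild are illustrative names, not the platform's actual API, and the import path is assumed:

from openedx.core.djangoapps.content.block_structure.exceptions import BlockStructureNotFound

def get_collected_sketch(store, root_block_usage_key, rebuild):
    """One recovery path now covers both 'never cached' and 'cached but corrupt'."""
    try:
        return store.get(root_block_usage_key)
    except BlockStructureNotFound:
        block_structure = rebuild(root_block_usage_key)  # e.g. re-collect from the modulestore
        store.add(block_structure)
        return block_structure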
@@ -56,11 +56,17 @@ def get_edx_api_data(api_config, resource, api, resource_id=None, querystring=None,
     cached = cache.get(cache_key)
     if cached:
-        cached_response = zunpickle(cached)
-        if fields:
-            cached_response = get_fields(fields, cached_response)
-        return cached_response
+        try:
+            cached_response = zunpickle(cached)
+        except Exception:  # pylint: disable=broad-except
+            # Data is corrupt in some way.
+            log.warning("Data for cache is corrupt for cache key %s", cache_key)
+            cache.delete(cache_key)
+        else:
+            if fields:
+                cached_response = get_fields(fields, cached_response)
+            return cached_response
 
     try:
         endpoint = getattr(api, resource)
...
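
The try/except/else shape in this last hunk is deliberate: the else branch runs only when zunpickle succeeded, so a corrupt entry is deleted and execution falls through to the live API call below it instead of returning broken data. The semantics in miniature:

def parse_or_fallback(raw):
    try:
        value = int(raw)
    except ValueError:
        pass              # corrupt input: fall through to the fallback below
    else:
        return value      # only reached when int(raw) raised nothing
    return -1             # fallback path, like the API call in the diff

assert parse_or_fallback("42") == 42
assert parse_or_fallback("corrupt") == -1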