diff --git a/common/lib/xmodule/xmodule/modulestore/split_mongo/mongo_connection.py b/common/lib/xmodule/xmodule/modulestore/split_mongo/mongo_connection.py
index 928566e179014448c2d2d67c8725f8b8390c37ad..cdfd854d094e523d40163bfb703c9d6449a1267a 100644
--- a/common/lib/xmodule/xmodule/modulestore/split_mongo/mongo_connection.py
+++ b/common/lib/xmodule/xmodule/modulestore/split_mongo/mongo_connection.py
@@ -226,24 +226,30 @@ class CourseStructureCache(object):
             return None
 
         with TIMER.timer("CourseStructureCache.get", course_context) as tagger:
-            compressed_pickled_data = self.cache.get(key)
-            tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
-
-            if compressed_pickled_data is None:
-                # Always log cache misses, because they are unexpected
-                tagger.sample_rate = 1
+            try:
+                compressed_pickled_data = self.cache.get(key)
+                tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
+
+                if compressed_pickled_data is None:
+                    # Always log cache misses, because they are unexpected
+                    tagger.sample_rate = 1
+                    return None
+
+                tagger.measure('compressed_size', len(compressed_pickled_data))
+
+                pickled_data = zlib.decompress(compressed_pickled_data)
+                tagger.measure('uncompressed_size', len(pickled_data))
+
+                if six.PY2:
+                    return pickle.loads(pickled_data)
+                else:
+                    return pickle.loads(pickled_data, encoding='latin-1')
+            except Exception:  # pylint: disable=broad-except
+                # The cached data is corrupt in some way; get rid of it.
+                log.warning("CourseStructureCache: Bad data in cache for %s", course_context)
+                self.cache.delete(key)
                 return None
 
-            tagger.measure('compressed_size', len(compressed_pickled_data))
-
-            pickled_data = zlib.decompress(compressed_pickled_data)
-            tagger.measure('uncompressed_size', len(pickled_data))
-
-            if six.PY2:
-                return pickle.loads(pickled_data)
-            else:
-                return pickle.loads(pickled_data, encoding='latin-1')
-
     def set(self, key, structure, course_context=None):
         """Given a structure, will pickle, compress, and write to cache."""
         if self.cache is None:
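
For context, the guarded read in CourseStructureCache.get reduces to the
following self-contained sketch (here `cache` and `key` are stand-ins for the
Django cache client and structure key, not the actual module code):

import logging
import pickle
import zlib

log = logging.getLogger(__name__)

def get_compressed_pickle(cache, key):
    """Read a zlib-compressed pickle from cache, evicting corrupt entries."""
    try:
        compressed = cache.get(key)
        if compressed is None:
            # Cache miss: the caller falls back to Mongo.
            return None
        return pickle.loads(zlib.decompress(compressed))
    except Exception:
        # Any failure (bad zlib stream, truncated pickle, ...) is treated as
        # corruption: evict the entry so the next read repopulates it.
        log.warning("Bad data in cache for key %s", key)
        cache.delete(key)
        return None
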
diff --git a/common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py b/common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py
index 6b72e27e81f36bd3f35a281bdba3b12b1360e51a..c12915a620819926f29d0c337ecb7e3b054e9721 100644
--- a/common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py
+++ b/common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py
@@ -977,6 +977,15 @@ class TestCourseStructureCache(SplitModuleTest):
         # now make sure that you get the same structure
         self.assertEqual(cached_structure, not_cached_structure)
 
+        # If the cached data is corrupt, fetch the structure from Mongo again.
+        cache_key = self.new_course.id.version_guid
+        self.cache.set(cache_key, b"bad_data")
+        with check_mongo_calls(1):
+            not_corrupt_structure = self._get_structure(self.new_course)
+
+        # now make sure the re-fetched structure matches the original
+        self.assertEqual(not_corrupt_structure, not_cached_structure)
+
     @patch('xmodule.modulestore.split_mongo.mongo_connection.get_cache')
     def test_course_structure_cache_no_cache_configured(self, mock_get_cache):
         mock_get_cache.side_effect = InvalidCacheBackendError
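
A note on why `b"bad_data"` exercises the corruption path: it is not a valid
zlib stream, so decompression fails before pickle.loads ever runs, and the
guarded read above falls back to Mongo (the single call that
check_mongo_calls(1) asserts). A quick illustration:

import pickle
import zlib

try:
    pickle.loads(zlib.decompress(b"bad_data"))
except zlib.error as err:
    print(err)  # e.g. "Error -3 while decompressing data: incorrect header check"
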
diff --git a/openedx/core/djangoapps/content/block_structure/store.py b/openedx/core/djangoapps/content/block_structure/store.py
index c6568f031f699e2dcf071b63a8cc368c19fdf7c2..b35b92f61be75e91a257678bab5fd6cc3a1a4f4c 100644
--- a/openedx/core/djangoapps/content/block_structure/store.py
+++ b/openedx/core/djangoapps/content/block_structure/store.py
@@ -207,7 +207,15 @@ class BlockStructureStore(object):
         """
         Deserializes the given data and returns the parsed block_structure.
         """
-        block_relations, transformer_data, block_data_map = zunpickle(serialized_data)
+
+        try:
+            block_relations, transformer_data, block_data_map = zunpickle(serialized_data)
+        except Exception:  # pylint: disable=broad-except
+            # Failed to deserialize the data, so assume it's corrupt.
+            bs_model = self._get_model(root_block_usage_key)
+            logger.exception(u"BlockStructure: Failed to load data from cache for %s", bs_model)
+            raise BlockStructureNotFound(bs_model.data_usage_key)
+
         return BlockStructureFactory.create_new(
             root_block_usage_key,
             block_relations,
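
Raising BlockStructureNotFound here makes corrupt data indistinguishable from
missing data, so callers reuse their existing rebuild path. A hypothetical
caller-side sketch (get_or_rebuild and the exact factory/store calls are
illustrative assumptions, not the actual manager code):

from openedx.core.djangoapps.content.block_structure.exceptions import BlockStructureNotFound
from openedx.core.djangoapps.content.block_structure.factory import BlockStructureFactory

def get_or_rebuild(store, root_block_usage_key, modulestore):
    try:
        return store.get(root_block_usage_key)
    except BlockStructureNotFound:
        # Treat corrupt data exactly like missing data: rebuild and re-cache.
        structure = BlockStructureFactory.create_from_modulestore(
            root_block_usage_key, modulestore,
        )
        store.add(structure)
        return structure
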
diff --git a/openedx/core/lib/edx_api_utils.py b/openedx/core/lib/edx_api_utils.py
index 0f1fab26a180f9e8b3b6c2934c16fafe7c6db9cc..237c7b5ac5859cff2290fd9f8f1ba3a0ad551abe 100644
--- a/openedx/core/lib/edx_api_utils.py
+++ b/openedx/core/lib/edx_api_utils.py
@@ -56,11 +56,17 @@ def get_edx_api_data(api_config, resource, api, resource_id=None, querystring=No
 
         cached = cache.get(cache_key)
         if cached:
-            cached_response = zunpickle(cached)
-            if fields:
-                cached_response = get_fields(fields, cached_response)
-
-            return cached_response
+            try:
+                cached_response = zunpickle(cached)
+            except Exception:  # pylint: disable=broad-except
+                # The cached data is corrupt in some way.
+                log.warning("Cached data is corrupt for cache key %s", cache_key)
+                cache.delete(cache_key)
+            else:
+                if fields:
+                    cached_response = get_fields(fields, cached_response)
+
+                return cached_response
 
     try:
         endpoint = getattr(api, resource)
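
The try/except/else arrangement above is a read-through cache with
corrupt-entry eviction: a bad entry is deleted and control falls through to
the live API call below. The pattern in the abstract, as a minimal sketch
with generic names (not the edx_api_utils API):

import logging

log = logging.getLogger(__name__)

def cached_fetch(cache, cache_key, fetch, deserialize, serialize):
    """Return cached data if it deserializes cleanly, else fetch and re-cache."""
    raw = cache.get(cache_key)
    if raw:
        try:
            return deserialize(raw)
        except Exception:  # pylint: disable=broad-except
            # Corrupt entry: delete it and fall through to a live fetch.
            log.warning("Cached data is corrupt for cache key %s", cache_key)
            cache.delete(cache_key)
    data = fetch()
    cache.set(cache_key, serialize(data))
    return data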