Skip to content
Snippets Groups Projects
Unverified Commit b501cc19 authored by David Ormsbee's avatar David Ormsbee Committed by GitHub
Browse files

Merge pull request #23031 from edx/revert-22779-ormsbee/revert_context_processor_cache

Re-enable caching of context processor output
parents 459cecc4 5ce6cecf
No related branches found
No related tags found
No related merge requests found
......@@ -15,6 +15,7 @@
from django.conf import settings
from django.template import Context, engines
from edx_django_utils.cache import RequestCache
from mako.template import Template as MakoTemplate
from six import text_type
......@@ -45,9 +46,27 @@ class Template(object):
"""
This takes a render call with a context (from Django) and translates
it to a render call on the mako template.
When rendering a large sequence of XBlocks, we may end up rendering
hundreds of small templates. Even if context processors aren't very
expensive individually, they will quickly add up in that situation. To
help guard against this, we do context processing once for a given
request and then cache it.
"""
context_object = self._get_context_object(request)
context_dictionary = self._get_context_processors_output_dict(context_object)
request_cache = RequestCache('context_processors')
cache_response = request_cache.get_cached_response('cp_output')
if cache_response.is_found:
context_dictionary = dict(cache_response.value)
else:
context_dictionary = self._get_context_processors_output_dict(context_object)
# The context_dictionary is later updated with template specific
# variables. There are potentially hundreds of calls to templates
# rendering and we don't want them to interfere with each other, so
# we make a copy from the output of the context processors and then
# recreate a new dict every time we pull from the cache.
request_cache.set('cp_output', dict(context_dictionary))
if isinstance(context, Context):
context_dictionary.update(context.flatten())
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment