Typos make the world go round
starfields_drf_generics/cache_mixins.py (new file, 122 lines)
@@ -0,0 +1,122 @@
from libraries.utils import sorted_params_string

# TODO: the classes below that involve create, update, and destroy do not
# delete their caches properly; they need a pattern-based cache delete.
# TODO: add more reasonable asserts and feedback.
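
# --- Illustrative sketch, not part of this commit ---
# The first TODO above asks for a pattern-based cache delete. The core Django
# cache API cannot do this, but django-redis (an assumption: the backend in
# use is not shown in this commit) exposes `delete_pattern` on its cache
# client:

def delete_cache_by_prefix(cache, prefix):
    """Hypothetical helper: drop every cached entry whose key starts with
    `prefix`. Requires the django-redis backend; `delete_pattern` is not
    part of the core Django cache API."""
    # Keys built by the mixins below look like "<prefix>.<filters_string>" or
    # "<prefix>.<user>.<filters_string>", so a prefix glob catches both.
    cache.delete_pattern(f"{prefix}.*")
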
# Mixin classes that provide cache functionality.
class CacheUniqueUrl:

    def get_cache_unique_url(self, request):
        """Build a unique, order-independent key for the query so that
        equivalent requests share a single cache entry."""
        if hasattr(self, 'filters_string'):
            # The key has already been built for this request.
            return

        filters = {}
        if self.extra_filters_dict:
            filters.update(self.extra_filters_dict)
            # If the URL parameters contain any of the extra filter keys,
            # take their values from the query string instead.
            for key in self.extra_filters_dict:
                if key in self.request.query_params:
                    filters[key] = self.request.query_params[key].replace(' ', '').split(',')
                # Check whether the key is resolved in the URLconf as well.
                if key in self.kwargs:
                    filters[key] = [self.kwargs[key]]

        if getattr(self, 'paged', False):
            filters.update({'limit': [self.default_page_size], 'offset': [0]})
            if 'limit' in self.request.query_params:
                filters.update({'limit': [self.request.query_params['limit']]})
            if 'offset' in self.request.query_params:
                filters.update({'offset': [self.request.query_params['offset']]})

        for backend in list(self.filter_backends):
            filters.update(backend().get_filters_dict(request, self))

        self.filters_string = sorted_params_string(filters)

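
# --- Illustrative sketch, not part of this commit ---
# `sorted_params_string` comes from `libraries.utils`, which is not included
# here. A minimal stand-in consistent with how it is used above (a dict of
# value lists in, a deterministic string out) might look like:

def _sorted_params_string_sketch(filters):
    """Hypothetical: serialise {key: [values]} into a canonical string,
    sorting both keys and values so that equivalent queries produce
    identical cache keys regardless of parameter order."""
    parts = []
    for key in sorted(filters):
        values = ','.join(sorted(str(v) for v in filters[key]))
        parts.append(f"{key}={values}")
    return '&'.join(parts)
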
class CacheGetMixin(CacheUniqueUrl):
    cache_prefix = None
    cache_vary_on_user = False
    cache_timeout_mins = None
    default_page_size = 20
    extra_filters_dict = None

    def get_cache(self, request):
        assert self.cache_prefix is not None, (
            f"{self.__class__.__name__} should include a `cache_prefix` attribute"
        )

        self.get_cache_unique_url(request)

        # Attempt to fetch the response for the whole request from the cache.
        try:
            if self.cache_vary_on_user:
                cache_attempt = self.cache.get(f"{self.cache_prefix}.{request.user}.{self.filters_string}")
            else:
                cache_attempt = self.cache.get(f"{self.cache_prefix}.{self.filters_string}")
        except Exception:
            self.logger.info(f"Cache get attempt for {self.__class__.__name__} failed.")
            cache_attempt = None

        return cache_attempt or None

class CacheSetMixin(CacheUniqueUrl):
    cache_prefix = None
    cache_vary_on_user = False
    cache_timeout_mins = None
    default_page_size = 20
    extra_filters_dict = None

    def set_cache(self, request, response):
        assert self.cache_prefix is not None, (
            f"{self.__class__.__name__} should include a `cache_prefix` attribute"
        )

        self.get_cache_unique_url(request)

        def caching_function(rendered_response):
            # Write the rendered response to the cache.
            try:
                if self.cache_vary_on_user:
                    key = f"{self.cache_prefix}.{request.user}.{self.filters_string}"
                else:
                    key = f"{self.cache_prefix}.{self.filters_string}"
                self.cache.set(key=key,
                               value=rendered_response.data,
                               timeout=60 * self.cache_timeout_mins)
            except Exception:
                self.logger.exception(f"Cache set attempt for {self.__class__.__name__} failed.")

        # Register the hook on the response: the cache write only happens
        # after the response has been rendered.
        response.add_post_render_callback(caching_function)

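
# --- Illustrative sketch, not part of this commit ---
# One way the get/set mixins might be wired into a DRF generic view. The view,
# its cache/logger attributes, and the empty `filter_backends` are all
# hypothetical; the mixins assume the host view supplies `self.cache` and
# `self.logger`, and they call a project-specific `get_filters_dict` on each
# filter backend, so stock DRF backends would not work unmodified.

import logging

from django.core.cache import cache as django_cache
from rest_framework.generics import ListAPIView
from rest_framework.response import Response


class ExampleCachedListView(CacheGetMixin, CacheSetMixin, ListAPIView):
    # queryset and serializer_class omitted; a real view must define them.
    cache_prefix = 'example.list'
    cache_timeout_mins = 15
    cache = django_cache
    logger = logging.getLogger(__name__)
    filter_backends = []

    def list(self, request, *args, **kwargs):
        cached = self.get_cache(request)
        if cached is not None:
            return Response(cached)
        response = super().list(request, *args, **kwargs)
        # set_cache only registers a post-render callback, so the actual
        # write happens after DRF renders the response.
        self.set_cache(request, response)
        return response
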
class CacheDeleteMixin(CacheUniqueUrl):
    cache_delete = True
    cache_prefix = None
    cache_vary_on_user = False
    cache_timeout_mins = None
    extra_filters_dict = None

    def delete_cache(self, request):
        if not self.cache_delete:
            return

        assert self.cache_prefix is not None, (
            f"{self.__class__.__name__} should include a `cache_prefix` attribute"
        )

        # Build the unique key for the query, then delete the stale entry now
        # that the underlying data has changed.
        self.get_cache_unique_url(request)
        try:
            if self.cache_vary_on_user:
                self.cache.delete(f"{self.cache_prefix}.{request.user}.{self.filters_string}")
            else:
                self.cache.delete(f"{self.cache_prefix}.{self.filters_string}")
        except Exception:
            self.logger.exception(f"Cache delete attempt for {self.__class__.__name__} failed.")