Adds multi-database support.

This commit is contained in:
Bertrand Bordage 2014-10-20 23:43:10 +02:00
parent 65a8008073
commit 19f306054d
6 changed files with 71 additions and 47 deletions

View file

@ -55,7 +55,7 @@ Setting Default value Description
==================== ============= ============================================
``CACHALOT_ENABLED`` ``True`` If set to ``False``, disables SQL caching
but keeps invalidating to avoid stale cache
``CACHALOT_CACHE`` ``'default'`` Name of the cache from |CACHES|_ used by
``CACHALOT_CACHE`` ``'default'`` Alias of the cache from |CACHES|_ used by
django-cachalot
==================== ============= ============================================
@ -73,7 +73,7 @@ or simply by changing its attributes:
with cachalot_settings(CACHALOT_ENABLED=False):
# SQL queries are not cached in this block
@cachalot_settings(CACHALOT_CACHE='another_cache')
@cachalot_settings(CACHALOT_CACHE='another_alias')
def your_function():
# What's in this function uses another cache
@ -150,7 +150,7 @@ What still needs to be done
For version 1.0
...............
- Handle multiple databases
- Write tests for complex multi-database scenarios
- Write tests for `multi-table inheritance <https://docs.djangoproject.com/en/1.7/topics/db/models/#multi-table-inheritance>`_
- Add invalidation on migrations in Django 1.7 (& South?)

View file

@ -6,7 +6,7 @@ from threading import local
from django.conf import settings
# TODO: Replace with caches[CACHALOT_CACHE] when we drop Django 1.6 support.
from django.core.cache import get_cache as get_django_cache
from django.db import connection
from django.db import connections
from .settings import cachalot_settings
from .transaction import AtomicCache
@ -20,20 +20,20 @@ class CacheHandler(local):
self._atomic_caches = []
return self._atomic_caches
def get_atomic_cache(self, cache_name, level):
if cache_name not in self.atomic_caches[level]:
self.atomic_caches[level][cache_name] = AtomicCache(
self.get_cache(cache_name, level-1))
return self.atomic_caches[level][cache_name]
def get_atomic_cache(self, cache_alias, level):
if cache_alias not in self.atomic_caches[level]:
self.atomic_caches[level][cache_alias] = AtomicCache(
self.get_cache(cache_alias, level-1))
return self.atomic_caches[level][cache_alias]
def get_cache(self, cache_name=None, atomic_level=-1):
if cache_name is None:
cache_name = cachalot_settings.CACHALOT_CACHE
def get_cache(self, cache_alias=None, atomic_level=-1):
if cache_alias is None:
cache_alias = cachalot_settings.CACHALOT_CACHE
min_level = -len(self.atomic_caches)
if atomic_level < min_level:
return get_django_cache(cache_name)
return self.get_atomic_cache(cache_name, atomic_level)
return get_django_cache(cache_alias)
return self.get_atomic_cache(cache_alias, atomic_level)
def enter_atomic(self):
self.atomic_caches.append({})
@ -44,17 +44,21 @@ class CacheHandler(local):
for atomic_cache in atomic_caches:
atomic_cache.commit()
def clear(self, cache_name):
tables = connection.introspection.table_names()
tables_cache_keys = [_get_table_cache_key(t) for t in tables]
def clear(self, cache_alias, db_alias):
tables = connections[db_alias].introspection.table_names()
tables_cache_keys = [_get_table_cache_key(db_alias, t) for t in tables]
for atomic_level in range(-(len(self.atomic_caches)+1), 0):
cache = self.get_cache(cache_name, atomic_level)
cache = self.get_cache(cache_alias, atomic_level)
_invalidate_tables_cache_keys(cache, tables_cache_keys)
def clear_all_for_db(self, db_alias):
for cache_alias in settings.CACHES:
self.clear(cache_alias, db_alias)
def clear_all(self):
for cache_name in settings.CACHES:
self.clear(cache_name)
for db_alias in settings.DATABASES:
self.clear_all_for_db(db_alias)
cachalot_caches = CacheHandler()

View file

@ -6,13 +6,14 @@ from functools import wraps
import pickle
import re
from django.db import connection
from django.conf import settings
from django.db import connections
from django.db.models.query import EmptyResultSet
from django.db.models.sql.compiler import (
SQLCompiler, SQLAggregateCompiler, SQLDateCompiler, SQLDateTimeCompiler,
SQLInsertCompiler, SQLUpdateCompiler, SQLDeleteCompiler)
from django.db.models.sql.where import ExtraWhere
from django.db.transaction import Atomic
from django.db.transaction import Atomic, get_connection
from django.test import TransactionTestCase
from .cache import cachalot_caches
@ -89,7 +90,7 @@ def _patch_orm_read():
and not isinstance(result, (tuple, list)):
result = list(result)
_update_tables_queries(cache, query, cache_key)
_update_tables_queries(cache, compiler, cache_key)
cache.set(cache_key, pickle.dumps(result))
else:
@ -108,7 +109,7 @@ def _patch_orm_write():
def patch_execute_sql(original):
@wraps(original)
def inner(compiler, *args, **kwargs):
_invalidate_tables(cachalot_caches.get_cache(), compiler.query)
_invalidate_tables(cachalot_caches.get_cache(), compiler)
return original(compiler, *args, **kwargs)
inner.original = original
@ -131,7 +132,7 @@ def _patch_atomic():
def patch_exit(original):
@wraps(original)
def inner(self, exc_type, exc_value, traceback):
needs_rollback = connection.needs_rollback
needs_rollback = get_connection(self.using).needs_rollback
original(self, exc_type, exc_value, traceback)
cachalot_caches.exit_atomic(exc_type is None
and not needs_rollback)
@ -144,16 +145,33 @@ def _patch_atomic():
def _patch_tests():
def patch_before(original):
def patch_create_test_db(original, db_alias):
@wraps(original)
def inner(*args, **kwargs):
cachalot_caches.clear_all()
out = original(*args, **kwargs)
cachalot_caches.clear_all_for_db(db_alias)
return out
inner.original = original
return inner
def patch_destroy_test_db(original, db_alias):
@wraps(original)
def inner(*args, **kwargs):
cachalot_caches.clear_all_for_db(db_alias)
return original(*args, **kwargs)
inner.original = original
return inner
def patch_after(original):
for db_alias in settings.DATABASES:
creation = connections[db_alias].creation
creation.create_test_db = patch_create_test_db(
creation.create_test_db, db_alias)
creation.destroy_test_db = patch_destroy_test_db(
creation.destroy_test_db, db_alias)
def patch_transaction_test_case(original):
@wraps(original)
def inner(*args, **kwargs):
out = original(*args, **kwargs)
@ -163,12 +181,9 @@ def _patch_tests():
inner.original = original
return inner
creation = connection.creation
creation.create_test_db = patch_after(creation.create_test_db)
creation.destroy_test_db = patch_before(creation.destroy_test_db)
TransactionTestCase._fixture_setup = patch_after(
TransactionTestCase._fixture_setup = patch_transaction_test_case(
TransactionTestCase._fixture_setup)
TransactionTestCase._fixture_teardown = patch_after(
TransactionTestCase._fixture_teardown = patch_transaction_test_case(
TransactionTestCase._fixture_teardown)

View file

@ -444,9 +444,11 @@ class ReadTestCase(TransactionTestCase):
data1 = list(Test.objects.all())
self.assertListEqual(data1, [self.t1, self.t2])
other_cache = [k for k in settings.DATABASES if k != 'default'][0]
with self.assertNumQueries(1):
data2 = list(Test.objects.using(other_cache))
other_cache_alias = [alias for alias in settings.DATABASES
if alias != 'default'][0]
with self.assertNumQueries(1, using=other_cache_alias):
data2 = list(Test.objects.using(other_cache_alias))
self.assertListEqual(data2, [])
@skipUnlessDBFeature('has_select_for_update')

View file

@ -58,9 +58,10 @@ class SettingsTestCase(TransactionTestCase):
with self.assertNumQueries(0):
list(Test.objects.all())
other_cache = [k for k in settings.CACHES if k != 'default'][0]
other_cache_alias = [alias for alias in settings.CACHES
if alias != 'default'][0]
with cachalot_settings(CACHALOT_CACHE=other_cache):
with cachalot_settings(CACHALOT_CACHE=other_cache_alias):
with self.assertNumQueries(1):
list(Test.objects.all())
with self.assertNumQueries(0):

View file

@ -9,7 +9,8 @@ def hash_cache_key(unicode_key):
def _get_query_cache_key(compiler):
return hash_cache_key('%s:%s' % compiler.as_sql())
sql, params = compiler.as_sql()
return hash_cache_key('%s:%s:%s' % (compiler.using, sql, params))
def _get_tables(query):
@ -21,16 +22,17 @@ def _get_tables(query):
return tables
def _get_table_cache_key(table):
return hash_cache_key('%s_queries' % table)
def _get_table_cache_key(db_alias, table):
return hash_cache_key('%s:%s:queries' % (db_alias, table))
def _get_tables_cache_keys(query):
return [_get_table_cache_key(t) for t in _get_tables(query)]
def _get_tables_cache_keys(compiler):
return [_get_table_cache_key(compiler.using, t)
for t in _get_tables(compiler.query)]
def _update_tables_queries(cache, query, cache_key):
tables_cache_keys = _get_tables_cache_keys(query)
def _update_tables_queries(cache, compiler, cache_key):
tables_cache_keys = _get_tables_cache_keys(compiler)
tables_queries = cache.get_many(tables_cache_keys)
for k in tables_cache_keys:
queries = tables_queries.get(k, [])
@ -47,6 +49,6 @@ def _invalidate_tables_cache_keys(cache, tables_cache_keys):
cache.delete_many(queries + tables_cache_keys)
def _invalidate_tables(cache, query):
tables_cache_keys = _get_tables_cache_keys(query)
def _invalidate_tables(cache, compiler):
tables_cache_keys = _get_tables_cache_keys(compiler)
_invalidate_tables_cache_keys(cache, tables_cache_keys)