Added async support (#656)
Some checks failed
Docs / docs (push) Has been cancelled
Test / ruff-format (push) Has been cancelled
Test / ruff-lint (push) Has been cancelled
Test / build (3.10) (push) Has been cancelled
Test / build (3.11) (push) Has been cancelled
Test / build (3.12) (push) Has been cancelled
Test / build (3.13) (push) Has been cancelled
Test / build (3.14) (push) Has been cancelled
Test / build (3.8) (push) Has been cancelled
Test / build (3.9) (push) Has been cancelled

* Added async logic

* Added tests and fixed async deadlock on aset

* Used abstract base class for backend to simplify code coverage

* Reordered try except block

* Added explicit thread safety

* Fixed linting error

* Worked on redis init block

* Fixed async test setup

* Added tests for redis instantiation

* Fixed linting errors
This commit is contained in:
Philipp Thumfart 2026-03-04 23:37:37 +01:00 committed by GitHub
parent 675c9446cb
commit 4ac1e546c7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
17 changed files with 1026 additions and 18 deletions

View file

@ -32,6 +32,7 @@ Merijn Bertels <merijn.bertels@gmail.com>
Omer Katz <omer.drow@gmail.com>
Petr Knap <dev@petrknap.cz>
Philip Neustrom <philipn@gmail.com>
Philipp Thumfart <philipp@thumfart.eu>
Pierre-Olivier Marec <pomarec@free.fr>
Roman Krejcik <farin@farin.cz>
Silvan Spross <silvan.spross@gmail.com>

View file

@ -1,21 +1,48 @@
"""Defines the base constance backend."""
from abc import ABC
from abc import abstractmethod
class Backend:
class Backend(ABC):
@abstractmethod
def get(self, key):
"""
Get the key from the backend store and return the value.
Return None if not found.
"""
raise NotImplementedError
...
@abstractmethod
async def aget(self, key):
"""
Get the key from the backend store and return the value.
Return None if not found.
"""
...
@abstractmethod
def mget(self, keys):
"""
Get the keys from the backend store and return a list of the values.
Return an empty list if not found.
"""
raise NotImplementedError
...
@abstractmethod
async def amget(self, keys):
"""
Get the keys from the backend store and return a list of the values.
Return an empty list if not found.
"""
...
@abstractmethod
def set(self, key, value):
"""Add the value to the backend store given the key."""
raise NotImplementedError
...
@abstractmethod
async def aset(self, key, value):
"""Add the value to the backend store given the key."""
...

View file

@ -85,6 +85,46 @@ class DatabaseBackend(Backend):
self._cache.add(key, value)
return value
async def aget(self, key):
"""Async variant of get(): try the cache first, then the database.

Returns the decoded value, or None when the key is unknown.
"""
# Local import keeps asgiref optional for purely synchronous deployments.
from asgiref.sync import sync_to_async
prefixed_key = self.add_prefix(key)
value = None
if self._cache:
value = await self._cache.aget(prefixed_key)
if value is None:
# autofill uses the synchronous ORM, so run it in a worker thread.
await sync_to_async(self.autofill, thread_sensitive=True)()
value = await self._cache.aget(prefixed_key)
if value is None:
match = await self._model._default_manager.filter(key=prefixed_key).only("value").afirst()
if match:
value = loads(match.value)
if self._cache:
# Prime the cache so the next aget() for this key is a cache hit.
await self._cache.aadd(prefixed_key, value)
return value
async def amget(self, keys):
"""Async bulk read: serve what the cache has, fetch the rest from the DB.

Returns a dict keyed by the caller's (unprefixed) keys; keys with no
stored value are simply absent from the result.
"""
if not keys:
return {}
# Map prefixed storage keys back to the caller's original key names.
prefixed_keys_map = {self.add_prefix(key): key for key in keys}
results = {}
if self._cache:
cache_results = await self._cache.aget_many(prefixed_keys_map.keys())
for prefixed_key, value in cache_results.items():
results[prefixed_keys_map[prefixed_key]] = value
missing_prefixed_keys = [k for k in prefixed_keys_map if prefixed_keys_map[k] not in results]
if missing_prefixed_keys:
try:
async for const in self._model._default_manager.filter(key__in=missing_prefixed_keys):
results[prefixed_keys_map[const.key]] = loads(const.value)
except (OperationalError, ProgrammingError):
# Table may not exist yet (e.g. before migrations); treat as no rows.
pass
return results
def set(self, key, value):
key = self.add_prefix(key)
created = False
@ -119,6 +159,13 @@ class DatabaseBackend(Backend):
signals.config_updated.send(sender=config, key=key, old_value=old_value, new_value=value)
async def aset(self, key, value):
"""Async variant of set(); delegates to the synchronous implementation."""
from asgiref.sync import sync_to_async
# We use sync_to_async because Django's transaction.atomic() and database connections are thread-local.
# This ensures the operation runs in the correct database thread until native async transactions are supported.
return await sync_to_async(self.set, thread_sensitive=True)(key, value)
def clear(self, sender, instance, created, **kwargs):
if self._cache and not created:
keys = [self.add_prefix(k) for k in settings.CONFIG]

View file

@ -19,6 +19,10 @@ class MemoryBackend(Backend):
with self._lock:
return self._storage.get(key)
async def aget(self, key):
    """Asynchronously fetch *key*; delegates to the synchronous path."""
    # The dict-backed store never blocks, so no real awaiting is required.
    value = self.get(key)
    return value
def mget(self, keys):
if not keys:
return None
@ -30,8 +34,18 @@ class MemoryBackend(Backend):
result.append((key, value))
return result
async def amget(self, keys):
    """Asynchronously fetch many keys, omitting those that are absent."""
    if not keys:
        return {}
    # Read under the lock so concurrent writers cannot mutate mid-iteration.
    with self._lock:
        found = {}
        for key in keys:
            if key in self._storage:
                found[key] = self._storage[key]
        return found
def set(self, key, value):
"""Store *value* and emit config_updated carrying the previous value."""
with self._lock:
old_value = self._storage.get(key)
self._storage[key] = value
signals.config_updated.send(sender=config, key=key, old_value=old_value, new_value=value)
async def aset(self, key, value):
"""Async write; delegates to the synchronous set()."""
# Memory operations are fast enough that we don't need true async here
self.set(key, value)

View file

@ -1,3 +1,4 @@
import asyncio
from threading import RLock
from time import monotonic
@ -17,27 +18,59 @@ class RedisBackend(Backend):
super().__init__()
self._prefix = settings.REDIS_PREFIX
connection_cls = settings.REDIS_CONNECTION_CLASS
if connection_cls is not None:
async_connection_cls = settings.REDIS_ASYNC_CONNECTION_CLASS
if connection_cls:
self._rd = utils.import_module_attr(connection_cls)()
else:
try:
import redis
except ImportError:
raise ImproperlyConfigured("The Redis backend requires redis-py to be installed.") from None
else:
if isinstance(settings.REDIS_CONNECTION, str):
self._rd = redis.from_url(settings.REDIS_CONNECTION)
else:
self._rd = redis.Redis(**settings.REDIS_CONNECTION)
if async_connection_cls:
self._ard = utils.import_module_attr(async_connection_cls)()
else:
try:
import redis.asyncio as aredis
except ImportError:
# We set this to none instead of raising an error to indicate that async support is not available
# without breaking existing sync usage.
self._ard = None
else:
if isinstance(settings.REDIS_CONNECTION, str):
self._ard = aredis.from_url(settings.REDIS_CONNECTION)
else:
self._ard = aredis.Redis(**settings.REDIS_CONNECTION)
def add_prefix(self, key):
return f"{self._prefix}{key}"
def _check_async_support(self):
"""Raise ImproperlyConfigured when no async Redis client is available."""
# _ard is None when neither redis.asyncio nor a custom async connection
# class could be set up during __init__.
if self._ard is None:
raise ImproperlyConfigured(
"Async support for the Redis backend requires redis>=4.2.0 "
"or a custom CONSTANCE_REDIS_ASYNC_CONNECTION_CLASS to be configured."
)
def get(self, key):
    """Fetch *key* from Redis and decode it; None for absent/empty values."""
    raw = self._rd.get(self.add_prefix(key))
    return loads(raw) if raw else None
async def aget(self, key):
    """Async fetch of *key*; requires an async Redis client to be configured."""
    self._check_async_support()
    raw = await self._ard.get(self.add_prefix(key))
    return loads(raw) if raw else None
def mget(self, keys):
if not keys:
return
@ -46,15 +79,37 @@ class RedisBackend(Backend):
if value:
yield key, loads(value)
async def amget(self, keys):
    """Async bulk fetch; absent/empty values are omitted from the result."""
    if not keys:
        return {}
    self._check_async_support()
    prefixed = [self.add_prefix(key) for key in keys]
    raw_values = await self._ard.mget(prefixed)
    decoded = {}
    for key, raw in zip(keys, raw_values):
        if raw:
            decoded[key] = loads(raw)
    return decoded
def set(self, key, value):
"""Store *value* in Redis and emit config_updated with the old value."""
old_value = self.get(key)
self._rd.set(self.add_prefix(key), dumps(value))
signals.config_updated.send(sender=config, key=key, old_value=old_value, new_value=value)
async def _aset_internal(self, key, value, old_value):
"""
Internal set operation. Separated to allow subclasses to provide old_value
without going through self.aget() which may have locking behavior.
"""
self._check_async_support()
await self._ard.set(self.add_prefix(key), dumps(value))
# Signal receivers are invoked synchronously from within the coroutine.
signals.config_updated.send(sender=config, key=key, old_value=old_value, new_value=value)
async def aset(self, key, value):
"""Async store: read the old value first so the signal can report it."""
old_value = await self.aget(key)
await self._aset_internal(key, value, old_value)
class CachingRedisBackend(RedisBackend):
_sentinel = object()
_lock = RLock()
_async_lock = None # Lazy-initialized asyncio.Lock
def __init__(self):
super().__init__()
@ -62,6 +117,12 @@ class CachingRedisBackend(RedisBackend):
self._cache = {}
self._sentinel = object()
def _get_async_lock(self):
"""Return the instance's asyncio.Lock, creating it on first use."""
# Lazily create the asyncio lock to avoid issues with event loops
# NOTE(review): once created, the lock is cached for the instance's
# lifetime and stays bound to the loop that first used it — confirm this
# is safe when multiple event loops touch the same backend instance.
if self._async_lock is None:
self._async_lock = asyncio.Lock()
return self._async_lock
def _has_expired(self, value):
return value[0] <= monotonic()
@ -79,11 +140,41 @@ class CachingRedisBackend(RedisBackend):
return value[1]
async def _aget_unlocked(self, key):
"""
Get value with cache support but without acquiring lock.
Caller must already hold the lock.
"""
value = self._cache.get(key, self._sentinel)
if value is self._sentinel or self._has_expired(value):
# Miss or stale entry: go to Redis and refresh the local cache.
new_value = await super().aget(key)
self._cache_value(key, new_value)
return new_value
# Cache entries are (deadline, value) tuples; unwrap the payload.
return value[1]
async def aget(self, key):
"""Cached async read with double-checked locking around the refresh."""
value = self._cache.get(key, self._sentinel)
if value is self._sentinel or self._has_expired(value):
async with self._get_async_lock():
# Double-check after acquiring lock, then delegate to unlocked version
return await self._aget_unlocked(key)
return value[1]
def set(self, key, value):
"""Synchronous write-through: update Redis, then the local cache."""
with self._lock:
super().set(key, value)
self._cache_value(key, value)
async def aset(self, key, value):
"""Async write-through; holds the asyncio lock for the whole operation."""
async with self._get_async_lock():
# Use unlocked version since we already hold the lock
old_value = await self._aget_unlocked(key)
# Use internal method to avoid lock recursion (super().aset calls self.aget)
await self._aset_internal(key, value, old_value)
self._cache_value(key, value)
def mget(self, keys):
if not keys:
return
@ -91,3 +182,38 @@ class CachingRedisBackend(RedisBackend):
value = self.get(key)
if value is not None:
yield key, value
async def amget(self, keys):
"""
Cached async bulk read: serve fresh local entries directly, fetch the
rest from Redis under the asyncio lock, caching anything newly fetched.
"""
if not keys:
return {}
results = {}
missing_keys = []
# First, check the local cache for all keys
for key in keys:
value = self._cache.get(key, self._sentinel)
if value is not self._sentinel and not self._has_expired(value):
results[key] = value[1]
else:
missing_keys.append(key)
# Fetch missing keys from Redis
if missing_keys:
async with self._get_async_lock():
# Re-check cache for keys that might have been fetched while waiting for lock
still_missing = []
for key in missing_keys:
value = self._cache.get(key, self._sentinel)
if value is not self._sentinel and not self._has_expired(value):
results[key] = value[1]
else:
still_missing.append(key)
if still_missing:
fetched = await super().amget(still_missing)
for key, value in fetched.items():
self._cache_value(key, value)
results[key] = value
return results

View file

@ -1,31 +1,149 @@
import asyncio
import warnings
from . import settings
from . import utils
class AsyncValueProxy:
"""
Awaitable stand-in for a single constance setting.

Returned by Config.__getattr__ when an event loop is running: awaiting
the proxy resolves the value through the backend's async API, while any
synchronous dunder access (str(), ==, +, ...) falls back to the blocking
path and emits a RuntimeWarning.
"""
def __init__(self, key, config, default):
self._key = key
self._config = config
self._default = default
# Resolved value and fetch flag; populated on first await.
self._value = None
self._fetched = False
def __await__(self):
return self._get_value().__await__()
async def _get_value(self):
"""Fetch once via the async backend; persist the default on a miss."""
if not self._fetched:
result = await self._config._backend.aget(self._key)
if result is None:
result = self._default
# Write the default back so subsequent reads find it stored.
await self._config.aset(self._key, result)
self._value = result
self._fetched = True
return self._value
def _get_sync_value(self):
"""Blocking fallback used by the dunders below; warns the caller."""
warnings.warn(
f"Synchronous access to Constance setting '{self._key}' inside an async loop. "
f"Use 'await config.{self._key}' instead.",
RuntimeWarning,
stacklevel=3,
)
return self._config._get_sync_value(self._key, self._default)
# The dunders below make the proxy mostly transparent to synchronous
# code; each resolves the underlying value via _get_sync_value().
def __str__(self):
return str(self._get_sync_value())
def __repr__(self):
return repr(self._get_sync_value())
def __int__(self):
return int(self._get_sync_value())
def __float__(self):
return float(self._get_sync_value())
def __bool__(self):
return bool(self._get_sync_value())
def __eq__(self, other):
return self._get_sync_value() == other
def __ne__(self, other):
return self._get_sync_value() != other
def __lt__(self, other):
return self._get_sync_value() < other
def __le__(self, other):
return self._get_sync_value() <= other
def __gt__(self, other):
return self._get_sync_value() > other
def __ge__(self, other):
return self._get_sync_value() >= other
def __getitem__(self, key):
return self._get_sync_value()[key]
def __iter__(self):
return iter(self._get_sync_value())
def __len__(self):
return len(self._get_sync_value())
def __contains__(self, item):
return item in self._get_sync_value()
def __hash__(self):
return hash(self._get_sync_value())
def __add__(self, other):
return self._get_sync_value() + other
def __sub__(self, other):
return self._get_sync_value() - other
def __mul__(self, other):
return self._get_sync_value() * other
def __truediv__(self, other):
return self._get_sync_value() / other
class Config:
    """The global config wrapper that handles the backend."""

    def __init__(self):
        # Bypass our own __setattr__ (which writes through to the backend)
        # when storing the backend instance itself.
        super().__setattr__("_backend", utils.import_module_attr(settings.BACKEND)())

    def _get_sync_value(self, key, default):
        """Read *key* synchronously, persisting the default on a miss."""
        result = self._backend.get(key)
        if result is None:
            result = default
            setattr(self, key, default)
        return result

    def __getattr__(self, key):
        if key == "_backend":
            return super().__getattribute__(key)
        try:
            # Each CONFIG entry is (default, help_text[, field_type]).
            if len(settings.CONFIG[key]) not in (2, 3):
                raise AttributeError(key)
            default = settings.CONFIG[key][0]
        except KeyError as e:
            raise AttributeError(key) from e
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No running event loop: plain synchronous access.
            return self._get_sync_value(key, default)
        # Inside an event loop: return an awaitable proxy so callers can
        # `await config.KEY` without blocking the loop.
        return AsyncValueProxy(key, self, default)

    def __setattr__(self, key, value):
        if key == "_backend":
            super().__setattr__(key, value)
            return
        if key not in settings.CONFIG:
            raise AttributeError(key)
        self._backend.set(key, value)

    async def aset(self, key, value):
        """Asynchronously store *value* for *key* via the backend."""
        if key not in settings.CONFIG:
            raise AttributeError(key)
        await self._backend.aset(key, value)

    async def amget(self, keys):
        """Asynchronously fetch several keys, falling back to defaults."""
        backend_values = await self._backend.amget(keys)
        # Merge with defaults like utils.get_values_for_keys
        default_initial = {name: settings.CONFIG[name][0] for name in keys if name in settings.CONFIG}
        return dict(default_initial, **backend_values)

    def __dir__(self):
        return settings.CONFIG.keys()

View file

@ -22,6 +22,8 @@ REDIS_CACHE_TIMEOUT = getattr(settings, "CONSTANCE_REDIS_CACHE_TIMEOUT", 60)
# Dotted path to a custom synchronous Redis connection class, or None.
REDIS_CONNECTION_CLASS = getattr(settings, "CONSTANCE_REDIS_CONNECTION_CLASS", None)
# Dotted path to a custom asynchronous Redis connection class, or None.
REDIS_ASYNC_CONNECTION_CLASS = getattr(settings, "CONSTANCE_REDIS_ASYNC_CONNECTION_CLASS", None)
# Either a Redis URL string or a dict of redis.Redis(...) keyword arguments.
REDIS_CONNECTION = getattr(settings, "CONSTANCE_REDIS_CONNECTION", {})
SUPERUSER_ONLY = getattr(settings, "CONSTANCE_SUPERUSER_ONLY", True)

View file

@ -22,6 +22,16 @@ def get_values():
return dict(default_initial, **dict(config._backend.mget(settings.CONFIG)))
async def aget_values():
"""
Get a dictionary of all configured values from the backend asynchronously.

:return: dict mapping every key in settings.CONFIG to its current value,
falling back to the declared default when the backend has no entry.
"""
default_initial = {name: options[0] for name, options in settings.CONFIG.items()}
backend_values = await config.amget(settings.CONFIG.keys())
# Backend values take precedence over the declared defaults.
return dict(default_initial, **backend_values)
def get_values_for_keys(keys):
"""
Retrieve values for specified keys from the backend.
@ -43,3 +53,24 @@ def get_values_for_keys(keys):
# Merge default values and backend values, prioritizing backend values
return dict(default_initial, **dict(config._backend.mget(keys)))
async def aget_values_for_keys(keys):
"""
Retrieve values for specified keys from the backend asynchronously.
:param keys: List of keys to retrieve.
:return: Dictionary with values for the specified keys.
:raises AttributeError: If any key is not found in the configuration.
:raises TypeError: If keys is not a list, tuple, or set.
"""
if not isinstance(keys, (list, tuple, set)):
raise TypeError("keys must be a list, tuple, or set of strings")
# Defaults only for keys that are actually declared in CONFIG.
default_initial = {name: options[0] for name, options in settings.CONFIG.items() if name in keys}
# Anything requested but not declared is an error, reported in one batch.
missing_keys = [key for key in keys if key not in default_initial]
if missing_keys:
raise AttributeError(f'"{", ".join(missing_keys)}" keys not found in configuration.')
backend_values = await config.amget(keys)
return dict(default_initial, **backend_values)

View file

@ -11,10 +11,11 @@ from datetime import datetime
def get_version():
# Try to get version from installed package metadata
try:
from importlib.metadata import PackageNotFoundError
from importlib.metadata import version
return version("django-constance")
except Exception:
except (ImportError, PackageNotFoundError):
pass
# Fall back to setuptools_scm generated version file

View file

@ -292,6 +292,36 @@ object and accessing the variables with attribute lookups::
if config.THE_ANSWER == 42:
answer_the_question()
Asynchronous usage
^^^^^^^^^^^^^^^^^^
If you are using Django's asynchronous features (like async views), you can ``await`` the settings directly on the standard ``config`` object::
from constance import config
async def my_async_view(request):
# Accessing settings is awaitable
if await config.THE_ANSWER == 42:
return await answer_the_question_async()
async def update_settings():
# Updating settings asynchronously
await config.aset('THE_ANSWER', 43)
# Bulk retrieval is supported as well
values = await config.amget(['THE_ANSWER', 'SITE_NAME'])
Performance and Safety
~~~~~~~~~~~~~~~~~~~~~~
While synchronous access (e.g., ``config.THE_ANSWER``) still works inside async views for some backends, it is highly discouraged:
* **Blocking:** Synchronous access blocks the event loop, reducing the performance of your entire application.
* **Safety Guards:** For the Database backend, Django's safety guards will raise a ``SynchronousOnlyOperation`` error if you attempt to access a setting synchronously from an async thread.
* **Automatic Detection:** Constance will emit a ``RuntimeWarning`` if it detects synchronous access inside an asynchronous event loop, helping you identify and fix these performance bottlenecks.
For peak performance, especially with the Redis backend, always use the ``await`` syntax which leverages native asynchronous drivers.
Django templates
^^^^^^^^^^^^^^^^

View file

@ -1,6 +1,8 @@
from django.test import TestCase
from django.test import TransactionTestCase
from constance import settings
from constance.base import Config
from tests.storage import StorageTestsMixin
@ -52,3 +54,95 @@ class TestDatabaseWithCache(StorageTestsMixin, TestCase):
def tearDown(self):
settings.BACKEND = self.old_backend
settings.DATABASE_CACHE_BACKEND = self.old_cache_backend
class TestDatabaseAsync(TransactionTestCase):
"""Async API tests for DatabaseBackend without a cache configured."""
def setUp(self):
self.old_backend = settings.BACKEND
settings.BACKEND = "constance.backends.database.DatabaseBackend"
self.config = Config()
def tearDown(self):
# Restore the original backend so other test cases are unaffected.
settings.BACKEND = self.old_backend
async def test_aget_returns_none_for_missing_key(self):
result = await self.config._backend.aget("INT_VALUE")
self.assertIsNone(result)
async def test_aget_returns_value(self):
await self.config._backend.aset("INT_VALUE", 42)
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 42)
async def test_aset_stores_value(self):
await self.config._backend.aset("INT_VALUE", 99)
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 99)
async def test_amget_returns_empty_for_no_keys(self):
result = await self.config._backend.amget([])
self.assertEqual(result, {})
async def test_amget_returns_values(self):
await self.config._backend.aset("INT_VALUE", 10)
await self.config._backend.aset("BOOL_VALUE", value=True)
result = await self.config._backend.amget(["INT_VALUE", "BOOL_VALUE"])
self.assertEqual(result, {"INT_VALUE": 10, "BOOL_VALUE": True})
class TestDatabaseWithCacheAsync(TransactionTestCase):
"""Async API tests for DatabaseBackend with the cache layer enabled."""
def setUp(self):
self.old_backend = settings.BACKEND
settings.BACKEND = "constance.backends.database.DatabaseBackend"
self.old_cache_backend = settings.DATABASE_CACHE_BACKEND
settings.DATABASE_CACHE_BACKEND = "default"
self.config = Config()
# Start each test from an empty cache.
self.config._backend._cache.clear()
def tearDown(self):
settings.BACKEND = self.old_backend
settings.DATABASE_CACHE_BACKEND = self.old_cache_backend
async def test_aget_returns_none_for_missing_key(self):
result = await self.config._backend.aget("INT_VALUE")
self.assertIsNone(result)
async def test_aget_returns_value_from_cache(self):
# First set a value using async
await self.config._backend.aset("INT_VALUE", 42)
# Clear cache and re-fetch to test aget path
self.config._backend._cache.clear()
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 42)
async def test_aget_populates_cache(self):
await self.config._backend.aset("INT_VALUE", 42)
self.config._backend._cache.clear()
# aget should populate the cache
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 42)
async def test_aset_stores_value(self):
await self.config._backend.aset("INT_VALUE", 99)
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 99)
async def test_amget_returns_empty_for_no_keys(self):
result = await self.config._backend.amget([])
self.assertEqual(result, {})
async def test_amget_returns_values(self):
await self.config._backend.aset("INT_VALUE", 10)
await self.config._backend.aset("BOOL_VALUE", value=True)
result = await self.config._backend.amget(["INT_VALUE", "BOOL_VALUE"])
self.assertEqual(result, {"INT_VALUE": 10, "BOOL_VALUE": True})
async def test_amget_uses_cache(self):
# Set values using async and ensure they're cached
await self.config._backend.aset("INT_VALUE", 10)
await self.config._backend.aset("BOOL_VALUE", value=True)
result = await self.config._backend.amget(["INT_VALUE", "BOOL_VALUE"])
self.assertEqual(result["INT_VALUE"], 10)
self.assertEqual(result["BOOL_VALUE"], True)

View file

@ -1,6 +1,8 @@
from django.test import TestCase
from django.test import TransactionTestCase
from constance import settings
from constance.base import Config
from tests.storage import StorageTestsMixin
@ -14,3 +16,43 @@ class TestMemory(StorageTestsMixin, TestCase):
def tearDown(self):
self.config._backend._storage = {}
settings.BACKEND = self.old_backend
def test_mget_empty_keys(self):
# MemoryBackend.mget() returns None (not an empty list) for no keys.
result = self.config._backend.mget([])
self.assertIsNone(result)
class TestMemoryAsync(TransactionTestCase):
"""Async API tests for the in-memory backend."""
def setUp(self):
self.old_backend = settings.BACKEND
settings.BACKEND = "constance.backends.memory.MemoryBackend"
self.config = Config()
# Start each test from an empty store.
self.config._backend._storage = {}
def tearDown(self):
self.config._backend._storage = {}
settings.BACKEND = self.old_backend
async def test_aget_returns_none_for_missing_key(self):
result = await self.config._backend.aget("INT_VALUE")
self.assertIsNone(result)
async def test_aget_returns_value(self):
self.config._backend.set("INT_VALUE", 42)
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 42)
async def test_aset_stores_value(self):
await self.config._backend.aset("INT_VALUE", 99)
result = self.config._backend.get("INT_VALUE")
self.assertEqual(result, 99)
async def test_amget_returns_empty_for_no_keys(self):
result = await self.config._backend.amget([])
self.assertEqual(result, {})
async def test_amget_returns_values(self):
self.config._backend.set("INT_VALUE", 10)
self.config._backend.set("BOOL_VALUE", value=True)
result = await self.config._backend.amget(["INT_VALUE", "BOOL_VALUE"])
self.assertEqual(result, {"INT_VALUE": 10, "BOOL_VALUE": True})

View file

@ -1,6 +1,12 @@
from unittest import mock
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test import TransactionTestCase
from constance import settings
from constance.backends.redisd import RedisBackend
from constance.base import Config
from tests.storage import StorageTestsMixin
@ -17,6 +23,241 @@ class TestRedis(StorageTestsMixin, TestCase):
self.config._backend._rd.clear()
settings.BACKEND = self.old_backend
def test_mget_empty_keys(self):
# With no keys mget() yields nothing (or returns None); normalize to a list.
result = list(self.config._backend.mget([]) or [])
self.assertEqual(result, [])
class TestCachingRedis(TestRedis):
"""Re-run the Redis storage tests against the caching Redis backend."""
_BACKEND = "constance.backends.redisd.CachingRedisBackend"
def test_mget_empty_keys(self):
# With no keys mget() yields nothing (or returns None); normalize to a list.
result = list(self.config._backend.mget([]) or [])
self.assertEqual(result, [])
class TestRedisAsync(TransactionTestCase):
"""Async API tests for the plain Redis backend (mocked connection)."""
_BACKEND = "constance.backends.redisd.RedisBackend"
def setUp(self):
self.old_backend = settings.BACKEND
settings.BACKEND = self._BACKEND
self.config = Config()
# Start each test from an empty store.
self.config._backend._rd.clear()
def tearDown(self):
self.config._backend._rd.clear()
settings.BACKEND = self.old_backend
async def test_aget_returns_none_for_missing_key(self):
result = await self.config._backend.aget("INT_VALUE")
self.assertIsNone(result)
async def test_aget_returns_value(self):
self.config._backend.set("INT_VALUE", 42)
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 42)
async def test_aset_stores_value(self):
await self.config._backend.aset("INT_VALUE", 99)
result = self.config._backend.get("INT_VALUE")
self.assertEqual(result, 99)
async def test_amget_returns_empty_for_no_keys(self):
result = await self.config._backend.amget([])
self.assertEqual(result, {})
async def test_amget_returns_values(self):
self.config._backend.set("INT_VALUE", 10)
self.config._backend.set("BOOL_VALUE", value=True)
result = await self.config._backend.amget(["INT_VALUE", "BOOL_VALUE"])
self.assertEqual(result, {"INT_VALUE": 10, "BOOL_VALUE": True})
async def test_amget_skips_missing_keys(self):
self.config._backend.set("INT_VALUE", 10)
result = await self.config._backend.amget(["INT_VALUE", "MISSING_KEY"])
self.assertEqual(result, {"INT_VALUE": 10})
class TestCachingRedisAsync(TransactionTestCase):
"""Async tests for CachingRedisBackend's local (deadline, value) cache."""
_BACKEND = "constance.backends.redisd.CachingRedisBackend"
def setUp(self):
self.old_backend = settings.BACKEND
settings.BACKEND = self._BACKEND
self.config = Config()
# Start each test with both Redis and the local cache empty.
self.config._backend._rd.clear()
self.config._backend._cache.clear()
def tearDown(self):
self.config._backend._rd.clear()
self.config._backend._cache.clear()
settings.BACKEND = self.old_backend
async def test_aget_caches_value(self):
# First set a value via sync
self.config._backend.set("INT_VALUE", 42)
# Clear the in-memory cache
self.config._backend._cache.clear()
# Async get should fetch and cache
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 42)
# Verify it's cached
self.assertIn("INT_VALUE", self.config._backend._cache)
async def test_aget_returns_cached_value(self):
# Manually set cache
from time import monotonic
timeout = self.config._backend._timeout
self.config._backend._cache["INT_VALUE"] = (monotonic() + timeout, 100)
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 100)
async def test_aget_refreshes_expired_cache(self):
from time import monotonic
# Set expired cache
self.config._backend._cache["INT_VALUE"] = (monotonic() - 10, 100)
# Set different value in redis using proper codec format
self.config._backend._rd.set(
self.config._backend.add_prefix("INT_VALUE"),
b'{"__type__": "default", "__value__": 200}',
)
result = await self.config._backend.aget("INT_VALUE")
self.assertEqual(result, 200)
async def test_aset_updates_cache(self):
await self.config._backend.aset("INT_VALUE", 55)
# Verify cache is updated
self.assertIn("INT_VALUE", self.config._backend._cache)
self.assertEqual(self.config._backend._cache["INT_VALUE"][1], 55)
async def test_amget_returns_empty_for_no_keys(self):
result = await self.config._backend.amget([])
self.assertEqual(result, {})
async def test_amget_returns_cached_values(self):
from time import monotonic
timeout = self.config._backend._timeout
self.config._backend._cache["INT_VALUE"] = (monotonic() + timeout, 10)
self.config._backend._cache["BOOL_VALUE"] = (monotonic() + timeout, True)
result = await self.config._backend.amget(["INT_VALUE", "BOOL_VALUE"])
self.assertEqual(result, {"INT_VALUE": 10, "BOOL_VALUE": True})
async def test_amget_fetches_missing_keys(self):
from time import monotonic
timeout = self.config._backend._timeout
# One key cached, one in Redis only
self.config._backend._cache["INT_VALUE"] = (monotonic() + timeout, 10)
self.config._backend._rd.set(
self.config._backend.add_prefix("BOOL_VALUE"),
b'{"__type__": "default", "__value__": true}',
)
result = await self.config._backend.amget(["INT_VALUE", "BOOL_VALUE"])
self.assertEqual(result["INT_VALUE"], 10)
self.assertEqual(result["BOOL_VALUE"], True)
async def test_amget_refreshes_expired_keys(self):
from time import monotonic
# Set expired cache
self.config._backend._cache["INT_VALUE"] = (monotonic() - 10, 100)
# Set different value in redis using proper codec format
self.config._backend._rd.set(
self.config._backend.add_prefix("INT_VALUE"),
b'{"__type__": "default", "__value__": 200}',
)
result = await self.config._backend.amget(["INT_VALUE"])
self.assertEqual(result["INT_VALUE"], 200)
class TestRedisBackendInit(TestCase):
"""Tests for RedisBackend.__init__ client initialization paths."""
def setUp(self):
self.old_conn_cls = settings.REDIS_CONNECTION_CLASS
self.old_async_conn_cls = settings.REDIS_ASYNC_CONNECTION_CLASS
self.old_conn = settings.REDIS_CONNECTION
def tearDown(self):
settings.REDIS_CONNECTION_CLASS = self.old_conn_cls
settings.REDIS_ASYNC_CONNECTION_CLASS = self.old_async_conn_cls
settings.REDIS_CONNECTION = self.old_conn
def test_no_redis_package_raises_improperly_configured(self):
settings.REDIS_CONNECTION_CLASS = None
settings.REDIS_ASYNC_CONNECTION_CLASS = "tests.redis_mockup.AsyncConnection"
with mock.patch.dict("sys.modules", {"redis": None}), self.assertRaises(ImproperlyConfigured):
RedisBackend()
def test_sync_redis_from_url_with_string_connection(self):
settings.REDIS_CONNECTION_CLASS = None
settings.REDIS_ASYNC_CONNECTION_CLASS = "tests.redis_mockup.AsyncConnection"
settings.REDIS_CONNECTION = "redis://localhost:6379/0"
mock_redis = mock.MagicMock()
with mock.patch.dict("sys.modules", {"redis": mock_redis, "redis.asyncio": mock_redis.asyncio}):
backend = RedisBackend()
mock_redis.from_url.assert_called_once_with("redis://localhost:6379/0")
self.assertEqual(backend._rd, mock_redis.from_url.return_value)
def test_sync_redis_with_dict_connection(self):
settings.REDIS_CONNECTION_CLASS = None
settings.REDIS_ASYNC_CONNECTION_CLASS = "tests.redis_mockup.AsyncConnection"
settings.REDIS_CONNECTION = {"host": "localhost", "port": 6379}
mock_redis = mock.MagicMock()
with mock.patch.dict("sys.modules", {"redis": mock_redis, "redis.asyncio": mock_redis.asyncio}):
backend = RedisBackend()
mock_redis.Redis.assert_called_once_with(host="localhost", port=6379)
self.assertEqual(backend._rd, mock_redis.Redis.return_value)
def test_async_redis_not_available_sets_ard_none(self):
settings.REDIS_CONNECTION_CLASS = "tests.redis_mockup.Connection"
settings.REDIS_ASYNC_CONNECTION_CLASS = None
mock_redis = mock.MagicMock()
# Simulate redis.asyncio not being available
with mock.patch.dict("sys.modules", {"redis": mock_redis, "redis.asyncio": None}):
backend = RedisBackend()
self.assertIsNone(backend._ard)
def test_async_redis_from_url_with_string_connection(self):
settings.REDIS_CONNECTION_CLASS = "tests.redis_mockup.Connection"
settings.REDIS_ASYNC_CONNECTION_CLASS = None
settings.REDIS_CONNECTION = "redis://localhost:6379/0"
mock_aredis = mock.MagicMock()
mock_redis = mock.MagicMock()
mock_redis.asyncio = mock_aredis
with mock.patch.dict("sys.modules", {"redis": mock_redis, "redis.asyncio": mock_aredis}):
backend = RedisBackend()
mock_aredis.from_url.assert_called_once_with("redis://localhost:6379/0")
self.assertEqual(backend._ard, mock_aredis.from_url.return_value)
def test_async_redis_with_dict_connection(self):
settings.REDIS_CONNECTION_CLASS = "tests.redis_mockup.Connection"
settings.REDIS_ASYNC_CONNECTION_CLASS = None
settings.REDIS_CONNECTION = {"host": "localhost", "port": 6379}
mock_aredis = mock.MagicMock()
mock_redis = mock.MagicMock()
mock_redis.asyncio = mock_aredis
with mock.patch.dict("sys.modules", {"redis": mock_redis, "redis.asyncio": mock_aredis}):
backend = RedisBackend()
mock_aredis.Redis.assert_called_once_with(host="localhost", port=6379)
self.assertEqual(backend._ard, mock_aredis.Redis.return_value)
def test_check_async_support_raises_when_ard_is_none(self):
    """_check_async_support() must fail loudly when no async client exists."""
    backend = RedisBackend()
    # Force the async client away to simulate a sync-only configuration.
    backend._ard = None
    with self.assertRaises(ImproperlyConfigured):
        backend._check_async_support()

View file

@ -10,3 +10,16 @@ class Cache(BaseCache):
self.set = self._cache.set
self.get = self._cache.get
self.clear = self._cache.clear
self.set_many = self._cache.set_many
self.get_many = self._cache.get_many
self.delete_many = self._cache.delete_many
# Async methods for DatabaseBackend.aget() support
async def aget(self, key, default=None, version=None):
return self.get(key, default, version)
async def aget_many(self, keys, version=None):
return self.get_many(keys, version)
async def aadd(self, key, value, timeout=None, version=None):
return self.add(key, value, timeout, version)

View file

@ -1,6 +1,28 @@
class Connection(dict):
# Shared storage so sync (Connection) and async (AsyncConnection) instances
# operate on the same underlying data, just like a real Redis server would.
_shared_store = {}
class Connection:
def set(self, key, value):
self[key] = value
_shared_store[key] = value
def get(self, key, default=None):
return _shared_store.get(key, default)
def mget(self, keys):
return [self.get(key) for key in keys]
return [_shared_store.get(key) for key in keys]
def clear(self):
_shared_store.clear()
class AsyncConnection:
async def set(self, key, value):
_shared_store[key] = value
async def get(self, key):
return _shared_store.get(key)
async def mget(self, keys):
return [_shared_store.get(key) for key in keys]

View file

@ -50,6 +50,7 @@ INSTALLED_APPS = (
ROOT_URLCONF = "tests.urls"
CONSTANCE_REDIS_CONNECTION_CLASS = "tests.redis_mockup.Connection"
CONSTANCE_REDIS_ASYNC_CONNECTION_CLASS = "tests.redis_mockup.AsyncConnection"
CONSTANCE_ADDITIONAL_FIELDS = {
"yes_no_null_select": [

198
tests/test_async.py Normal file
View file

@ -0,0 +1,198 @@
import warnings
from django.test import TransactionTestCase
from constance import config
from constance import utils
class AsyncTestCase(TransactionTestCase):
    """End-to-end tests for async access to constance config values.

    Covers awaitable attribute access, ``aset``/``amget``, the async utils,
    and sync fallback behavior (which must still work but emit a warning).
    """

    async def test_async_get(self):
        # Accessing an attribute on config should be awaitable when in async context
        val = await config.INT_VALUE
        self.assertEqual(val, 1)

    async def test_async_set(self):
        """aset() persists a value that both async and sync reads observe."""
        await config.aset("INT_VALUE", 42)
        val = await config.INT_VALUE
        self.assertEqual(val, 42)
        # Verify sync access also works (and emits warning)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            sync_val = int(config.INT_VALUE)
            self.assertEqual(sync_val, 42)
            self.assertTrue(any("Synchronous access" in str(warn.message) for warn in w))

    async def test_amget(self):
        """amget() returns a mapping of the requested keys."""
        values = await config.amget(["INT_VALUE", "BOOL_VALUE"])
        self.assertEqual(values["INT_VALUE"], 1)
        self.assertEqual(values["BOOL_VALUE"], True)

    async def test_sync_math_in_async_loop(self):
        """Sync arithmetic on a proxy inside an event loop works but warns."""
        # Pin the value first so the expected result does not depend on the
        # order in which other tests in this class ran. (The previous version
        # asserted `res == 11 if res < 50 else 52`, which accepted either
        # outcome based on res itself and could never catch a wrong value.)
        await config.aset("INT_VALUE", 1)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            res = config.INT_VALUE + 10
            self.assertEqual(res, 11)
            self.assertTrue(any("Synchronous access" in str(warn.message) for warn in w))

    async def test_utils_aget_values(self):
        """aget_values() returns every configured key with its current value."""
        values = await utils.aget_values()
        self.assertIn("INT_VALUE", values)
        self.assertIn("BOOL_VALUE", values)
        self.assertEqual(values["INT_VALUE"], 1)

    async def test_utils_aget_values_for_keys(self):
        """aget_values_for_keys() returns only the requested keys."""
        values = await utils.aget_values_for_keys(["INT_VALUE"])
        self.assertEqual(len(values), 1)
        self.assertEqual(values["INT_VALUE"], 1)

    async def test_bool_proxy(self):
        # BOOL_VALUE is True by default
        self.assertTrue(config.BOOL_VALUE)

    async def test_int_proxy(self):
        await config.aset("INT_VALUE", 1)
        self.assertEqual(int(config.INT_VALUE), 1)

    async def test_container_proxy(self):
        # LIST_VALUE is [1, "1", date(2019, 1, 1)] by default
        self.assertEqual(config.LIST_VALUE[0], 1)
        self.assertEqual(len(config.LIST_VALUE), 3)
        self.assertIn(1, config.LIST_VALUE)
        self.assertEqual(next(iter(config.LIST_VALUE)), 1)
class AsyncValueProxyTestCase(TransactionTestCase):
    """Tests for AsyncValueProxy dunder methods in async context."""

    def _sync_eval(self, operation):
        """Evaluate *operation* while recording warnings, assert that the
        synchronous-access warning was emitted, and return the result."""
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            outcome = operation()
        self.assertTrue(any("Synchronous access" in str(item.message) for item in caught))
        return outcome

    async def test_str_proxy(self):
        self.assertEqual(self._sync_eval(lambda: str(config.STRING_VALUE)), "Hello world")

    async def test_repr_proxy(self):
        self.assertEqual(self._sync_eval(lambda: repr(config.STRING_VALUE)), "'Hello world'")

    async def test_float_proxy(self):
        self.assertAlmostEqual(self._sync_eval(lambda: float(config.FLOAT_VALUE)), 3.1415926536)

    async def test_eq_proxy(self):
        self.assertTrue(self._sync_eval(lambda: config.INT_VALUE == 1))

    async def test_ne_proxy(self):
        self.assertTrue(self._sync_eval(lambda: config.INT_VALUE != 2))

    async def test_lt_proxy(self):
        self.assertTrue(self._sync_eval(lambda: config.INT_VALUE < 10))

    async def test_le_proxy(self):
        self.assertTrue(self._sync_eval(lambda: config.INT_VALUE <= 1))

    async def test_gt_proxy(self):
        self.assertTrue(self._sync_eval(lambda: config.INT_VALUE > 0))

    async def test_ge_proxy(self):
        self.assertTrue(self._sync_eval(lambda: config.INT_VALUE >= 1))

    async def test_hash_proxy(self):
        self.assertEqual(self._sync_eval(lambda: hash(config.INT_VALUE)), hash(1))

    async def test_sub_proxy(self):
        self.assertEqual(self._sync_eval(lambda: config.INT_VALUE - 1), 0)

    async def test_mul_proxy(self):
        self.assertEqual(self._sync_eval(lambda: config.INT_VALUE * 5), 5)

    async def test_truediv_proxy(self):
        self.assertEqual(self._sync_eval(lambda: config.INT_VALUE / 1), 1.0)

    async def test_aset_invalid_key(self):
        # Setting a key that is not declared in the config must be rejected.
        with self.assertRaises(AttributeError):
            await config.aset("INVALID_KEY", 42)
class AsyncUtilsTestCase(TransactionTestCase):
    """Tests for async utility functions."""

    async def test_aget_values_for_keys_invalid_type(self):
        # A bare string is not an accepted key collection.
        with self.assertRaises(TypeError):
            await utils.aget_values_for_keys("key1")

    async def test_aget_values_for_keys_missing_key(self):
        # An unknown key raises AttributeError naming the offending key.
        with self.assertRaises(AttributeError) as caught:
            await utils.aget_values_for_keys(["INVALID_KEY"])
        self.assertIn("INVALID_KEY", str(caught.exception))

    async def test_aget_values_for_keys_empty(self):
        # An empty request yields an empty mapping rather than an error.
        self.assertEqual(await utils.aget_values_for_keys([]), {})
class ConfigBaseTestCase(TransactionTestCase):
    """Tests for Config class edge cases."""

    def test_config_dir(self):
        # __dir__ should expose the configured keys for introspection.
        listed = dir(config)
        self.assertIn("INT_VALUE", listed)
        self.assertIn("BOOL_VALUE", listed)

    def test_access_backend_attribute(self):
        # The internal _backend attribute stays reachable from sync code.
        self.assertIsNotNone(config._backend)