mirror of
https://github.com/jazzband/django-constance.git
synced 2026-03-16 22:40:24 +00:00
Added async logic
This commit is contained in:
parent
d2b8ca12d5
commit
f8fa58cd75
11 changed files with 476 additions and 6 deletions
1
AUTHORS
1
AUTHORS
|
|
@ -32,6 +32,7 @@ Merijn Bertels <merijn.bertels@gmail.com>
|
|||
Omer Katz <omer.drow@gmail.com>
|
||||
Petr Knap <dev@petrknap.cz>
|
||||
Philip Neustrom <philipn@gmail.com>
|
||||
Philipp Thumfart <philipp@thumfart.eu>
|
||||
Pierre-Olivier Marec <pomarec@free.fr>
|
||||
Roman Krejcik <farin@farin.cz>
|
||||
Silvan Spross <silvan.spross@gmail.com>
|
||||
|
|
|
|||
|
|
@ -9,6 +9,13 @@ class Backend:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
async def aget(self, key):
    """Asynchronously look up ``key`` in the backend store.

    Returns the stored value, or ``None`` when the key is absent.
    Concrete backends must override this coroutine.
    """
    raise NotImplementedError
|
||||
|
||||
def mget(self, keys):
|
||||
"""
|
||||
Get the keys from the backend store and return a list of the values.
|
||||
|
|
@ -16,6 +23,17 @@ class Backend:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
async def amget(self, keys):
    """Asynchronously look up every key in ``keys``.

    Returns the found values (an empty collection when nothing is
    found).  Concrete backends must override this coroutine.
    """
    raise NotImplementedError
|
||||
|
||||
def set(self, key, value):
    """Store ``value`` under ``key`` in the backend store (abstract)."""
    raise NotImplementedError
|
||||
|
||||
async def aset(self, key, value):
    """Asynchronously store ``value`` under ``key`` (abstract)."""
    raise NotImplementedError
|
||||
|
|
|
|||
|
|
@ -85,6 +85,46 @@ class DatabaseBackend(Backend):
|
|||
self._cache.add(key, value)
|
||||
return value
|
||||
|
||||
async def aget(self, key):
    # Async counterpart of get(): resolve one config value, consulting
    # the optional Django cache before falling back to the database.
    from asgiref.sync import sync_to_async

    prefixed_key = self.add_prefix(key)
    value = None
    if self._cache:
        value = await self._cache.aget(prefixed_key)
        if value is None:
            # Cache miss: autofill() repopulates the cache from the DB.
            # It is synchronous (ORM + cache writes), so run it in a
            # worker thread rather than blocking the event loop.
            await sync_to_async(self.autofill)()
            value = await self._cache.aget(prefixed_key)
    if value is None:
        # Still unresolved (no cache configured, or key truly absent
        # from the cache): query the constance model with the async ORM.
        match = await self._model._default_manager.filter(key=prefixed_key).only("value").afirst()
        if match:
            value = loads(match.value)
            if self._cache:
                # Prime the cache so the next aget() is a cache hit.
                await self._cache.aadd(prefixed_key, value)
    return value
|
||||
|
||||
async def amget(self, keys):
    """Async bulk lookup: return ``{key: value}`` for every key found."""
    if not keys:
        return {}

    # Map the prefixed (stored) name back to the caller-facing key.
    prefix_to_key = {self.add_prefix(k): k for k in keys}
    found = {}

    if self._cache:
        cached = await self._cache.aget_many(prefix_to_key.keys())
        for stored_name, cached_value in cached.items():
            found[prefix_to_key[stored_name]] = cached_value

    # Anything the cache did not supply is read from the database.
    remaining = [p for p, k in prefix_to_key.items() if k not in found]
    if remaining:
        try:
            async for row in self._model._default_manager.filter(key__in=remaining):
                found[prefix_to_key[row.key]] = loads(row.value)
        except (OperationalError, ProgrammingError):
            # Table may not exist yet (e.g. before migrations): best effort.
            pass

    return found
|
||||
|
||||
def set(self, key, value):
|
||||
key = self.add_prefix(key)
|
||||
created = False
|
||||
|
|
@ -119,6 +159,13 @@ class DatabaseBackend(Backend):
|
|||
|
||||
signals.config_updated.send(sender=config, key=key, old_value=old_value, new_value=value)
|
||||
|
||||
async def aset(self, key, value):
    """Async wrapper around set().

    Django's transaction.atomic() and DB connections are thread-local,
    so the synchronous set() is delegated to a worker thread via
    sync_to_async until the ORM supports native async transactions.
    """
    from asgiref.sync import sync_to_async

    set_in_thread = sync_to_async(self.set)
    return await set_in_thread(key, value)
|
||||
|
||||
def clear(self, sender, instance, created, **kwargs):
|
||||
if self._cache and not created:
|
||||
keys = [self.add_prefix(k) for k in settings.CONFIG]
|
||||
|
|
|
|||
|
|
@ -19,6 +19,10 @@ class MemoryBackend(Backend):
|
|||
with self._lock:
|
||||
return self._storage.get(key)
|
||||
|
||||
async def aget(self, key):
    """Async read of one key.

    An in-process dict lookup is effectively instantaneous, so the
    synchronous path is reused directly instead of hopping threads.
    """
    return self.get(key)
|
||||
|
||||
def mget(self, keys):
|
||||
if not keys:
|
||||
return None
|
||||
|
|
@ -30,8 +34,18 @@ class MemoryBackend(Backend):
|
|||
result.append((key, value))
|
||||
return result
|
||||
|
||||
async def amget(self, keys):
    """Return ``{key: value}`` for every requested key present in storage."""
    if not keys:
        return {}
    with self._lock:
        snapshot = self._storage
        return {k: snapshot[k] for k in keys if k in snapshot}
|
||||
|
||||
def set(self, key, value):
    # Store under the lock, capturing the previous value for the signal.
    with self._lock:
        old_value = self._storage.get(key)
        self._storage[key] = value
    # Notify listeners; config_updated receivers get both old and new
    # values.  NOTE(review): assumed the send happens outside the lock
    # so receivers cannot deadlock on it — confirm against the original
    # file's indentation.
    signals.config_updated.send(sender=config, key=key, old_value=old_value, new_value=value)
|
||||
|
||||
async def aset(self, key, value):
    """Async write of one key.

    Reuses the synchronous path: an in-memory write is effectively
    instantaneous, so no thread off-loading is needed.
    """
    self.set(key, value)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import asyncio
|
||||
from threading import RLock
|
||||
from time import monotonic
|
||||
|
||||
|
|
@ -19,16 +20,28 @@ class RedisBackend(Backend):
|
|||
connection_cls = settings.REDIS_CONNECTION_CLASS
|
||||
if connection_cls is not None:
|
||||
self._rd = utils.import_module_attr(connection_cls)()
|
||||
self._ard = self._rd
|
||||
else:
|
||||
try:
|
||||
import redis
|
||||
except ImportError:
|
||||
raise ImproperlyConfigured("The Redis backend requires redis-py to be installed.") from None
|
||||
|
||||
if isinstance(settings.REDIS_CONNECTION, str):
|
||||
self._rd = redis.from_url(settings.REDIS_CONNECTION)
|
||||
else:
|
||||
self._rd = redis.Redis(**settings.REDIS_CONNECTION)
|
||||
|
||||
try:
|
||||
import redis.asyncio as aredis
|
||||
|
||||
if isinstance(settings.REDIS_CONNECTION, str):
|
||||
self._ard = aredis.from_url(settings.REDIS_CONNECTION)
|
||||
else:
|
||||
self._ard = aredis.Redis(**settings.REDIS_CONNECTION)
|
||||
except ImportError:
|
||||
self._ard = self._rd
|
||||
|
||||
def add_prefix(self, key):
    """Return the Redis key name: the configured prefix + the config key."""
    return "%s%s" % (self._prefix, key)
|
||||
|
||||
|
|
@ -38,6 +51,15 @@ class RedisBackend(Backend):
|
|||
return loads(value)
|
||||
return None
|
||||
|
||||
async def aget(self, key):
    """Async read of one key.

    Uses the async client when it exposes an ``aget`` coroutine (the
    duck-typed surface the test mocks provide); otherwise the sync
    client call is off-loaded to a thread.
    """
    full_key = self.add_prefix(key)
    if hasattr(self._ard, "aget"):
        raw = await self._ard.aget(full_key)
    else:
        raw = await asyncio.to_thread(self._rd.get, full_key)
    return loads(raw) if raw else None
|
||||
|
||||
def mget(self, keys):
|
||||
if not keys:
|
||||
return
|
||||
|
|
@ -46,15 +68,36 @@ class RedisBackend(Backend):
|
|||
if value:
|
||||
yield key, loads(value)
|
||||
|
||||
async def amget(self, keys):
    """Bulk async read; returns ``{key: value}`` for keys present in Redis."""
    if not keys:
        return {}
    stored_names = [self.add_prefix(k) for k in keys]
    if hasattr(self._ard, "amget"):
        raw_values = await self._ard.amget(stored_names)
    else:
        raw_values = await asyncio.to_thread(self._rd.mget, stored_names)
    result = {}
    for original_key, raw in zip(keys, raw_values):
        if raw:
            result[original_key] = loads(raw)
    return result
|
||||
|
||||
def set(self, key, value):
    """Write one value to Redis and broadcast config_updated.

    The previous value is read first because the signal receivers
    expect both old and new values.
    """
    previous = self.get(key)
    self._rd.set(self.add_prefix(key), dumps(value))
    signals.config_updated.send(sender=config, key=key, old_value=previous, new_value=value)
|
||||
|
||||
async def aset(self, key, value):
    """Async write of one value, followed by a config_updated signal.

    The old value is fetched first because Django signals are
    synchronous and their receivers expect it.
    """
    previous = await self.aget(key)
    stored_name = self.add_prefix(key)
    payload = dumps(value)
    if hasattr(self._ard, "aset"):
        await self._ard.aset(stored_name, payload)
    else:
        await asyncio.to_thread(self._rd.set, stored_name, payload)
    signals.config_updated.send(sender=config, key=key, old_value=previous, new_value=value)
|
||||
|
||||
|
||||
class CachingRedisBackend(RedisBackend):
|
||||
_sentinel = object()
|
||||
_lock = RLock()
|
||||
_async_lock = None # Lazy-initialized asyncio.Lock
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
|
@ -62,6 +105,12 @@ class CachingRedisBackend(RedisBackend):
|
|||
self._cache = {}
|
||||
self._sentinel = object()
|
||||
|
||||
def _get_async_lock(self):
|
||||
# Lazily create the asyncio lock to avoid issues with event loops
|
||||
if self._async_lock is None:
|
||||
self._async_lock = asyncio.Lock()
|
||||
return self._async_lock
|
||||
|
||||
def _has_expired(self, value):
|
||||
return value[0] <= monotonic()
|
||||
|
||||
|
|
@ -79,11 +128,31 @@ class CachingRedisBackend(RedisBackend):
|
|||
|
||||
return value[1]
|
||||
|
||||
async def aget(self, key):
    # Fast path: serve from the local in-process TTL cache when fresh.
    value = self._cache.get(key, self._sentinel)

    if value is self._sentinel or self._has_expired(value):
        async with self._get_async_lock():
            # Double-check after acquiring the lock: another task may
            # have refreshed this entry while we were waiting, in which
            # case a second Redis round-trip is avoided.
            value = self._cache.get(key, self._sentinel)
            if value is self._sentinel or self._has_expired(value):
                new_value = await super().aget(key)
                self._cache_value(key, new_value)
                return new_value
            return value[1]

    # Cached entries are (deadline, value) pairs; unwrap the value.
    return value[1]
|
||||
|
||||
def set(self, key, value):
    # Write-through: update Redis first, then the local TTL cache,
    # holding the re-entrant lock so readers never observe a torn update.
    with self._lock:
        super().set(key, value)
        self._cache_value(key, value)
|
||||
|
||||
async def aset(self, key, value):
    # Async write-through mirror of set(), guarded by the lazily
    # created asyncio lock instead of the threading RLock.
    async with self._get_async_lock():
        await super().aset(key, value)
        self._cache_value(key, value)
|
||||
|
||||
def mget(self, keys):
|
||||
if not keys:
|
||||
return
|
||||
|
|
@ -91,3 +160,38 @@ class CachingRedisBackend(RedisBackend):
|
|||
value = self.get(key)
|
||||
if value is not None:
|
||||
yield key, value
|
||||
|
||||
async def amget(self, keys):
    # Bulk variant of aget(): serve fresh entries from the local cache,
    # then fetch the remainder from Redis in a single round-trip.
    if not keys:
        return {}

    results = {}
    missing_keys = []

    # First, check the local cache for all keys
    for key in keys:
        value = self._cache.get(key, self._sentinel)
        if value is not self._sentinel and not self._has_expired(value):
            # Cached entries are (deadline, value) pairs; unwrap.
            results[key] = value[1]
        else:
            missing_keys.append(key)

    # Fetch missing keys from Redis
    if missing_keys:
        async with self._get_async_lock():
            # Re-check cache for keys that might have been fetched while
            # waiting for the lock (double-checked locking).
            still_missing = []
            for key in missing_keys:
                value = self._cache.get(key, self._sentinel)
                if value is not self._sentinel and not self._has_expired(value):
                    results[key] = value[1]
                else:
                    still_missing.append(key)

            if still_missing:
                fetched = await super().amget(still_missing)
                for key, value in fetched.items():
                    # Prime the local cache before returning.
                    self._cache_value(key, value)
                    results[key] = value

    return results
|
||||
|
|
|
|||
|
|
@ -1,31 +1,147 @@
|
|||
import asyncio
|
||||
import warnings
|
||||
|
||||
from . import settings
|
||||
from . import utils
|
||||
|
||||
|
||||
class AsyncValueProxy:
    """Awaitable stand-in for a single config value.

    Returned by ``Config.__getattr__`` when an asyncio event loop is
    running: ``await config.KEY`` resolves the value without blocking,
    while any synchronous use (str/int/bool conversion, comparisons,
    container protocol, arithmetic) falls back to a blocking read and
    emits a ``RuntimeWarning``.
    """

    def __init__(self, key, config, default):
        self._key = key
        self._config = config
        self._default = default
        # Resolved value is memoized after the first await.
        self._value = None
        self._fetched = False

    def __await__(self):
        # Makes the proxy itself awaitable: `await config.KEY`.
        return self._get_value().__await__()

    async def _get_value(self):
        # Fetch once; when the backend has no stored value yet, fall
        # back to the declared default and persist it (mirroring the
        # sync __getattr__ behaviour of writing the default on first read).
        if not self._fetched:
            result = await self._config._backend.aget(self._key)
            if result is None:
                result = self._default
                await self._config.aset(self._key, result)
            self._value = result
            self._fetched = True
        return self._value

    def _get_sync_value(self):
        # Blocking escape hatch used by all the synchronous dunders
        # below; warns so callers can migrate to `await`.
        warnings.warn(
            f"Synchronous access to Constance setting '{self._key}' inside an async loop. "
            f"Use 'await config.{self._key}' instead.",
            RuntimeWarning,
            stacklevel=3,
        )
        return self._config._get_sync_value(self._key, self._default)

    # --- Synchronous protocol support: each operator materializes the
    # underlying value through the (warning) blocking path above. ---

    def __str__(self):
        return str(self._get_sync_value())

    def __repr__(self):
        return repr(self._get_sync_value())

    def __int__(self):
        return int(self._get_sync_value())

    def __float__(self):
        return float(self._get_sync_value())

    def __bool__(self):
        return bool(self._get_sync_value())

    def __eq__(self, other):
        return self._get_sync_value() == other

    def __ne__(self, other):
        return self._get_sync_value() != other

    def __lt__(self, other):
        return self._get_sync_value() < other

    def __le__(self, other):
        return self._get_sync_value() <= other

    def __gt__(self, other):
        return self._get_sync_value() > other

    def __ge__(self, other):
        return self._get_sync_value() >= other

    def __getitem__(self, key):
        return self._get_sync_value()[key]

    def __iter__(self):
        return iter(self._get_sync_value())

    def __len__(self):
        return len(self._get_sync_value())

    def __contains__(self, item):
        return item in self._get_sync_value()

    def __hash__(self):
        # NOTE(review): hashing triggers a backend read, so the hash can
        # change if the stored value changes between calls — confirm
        # proxies are not used as long-lived dict keys.
        return hash(self._get_sync_value())

    def __add__(self, other):
        return self._get_sync_value() + other

    def __sub__(self, other):
        return self._get_sync_value() - other

    def __mul__(self, other):
        return self._get_sync_value() * other

    def __truediv__(self, other):
        return self._get_sync_value() / other
|
||||
|
||||
|
||||
class Config:
    """The global config wrapper that handles the backend.

    Attribute reads return the stored value directly in synchronous
    code, or an :class:`AsyncValueProxy` (awaitable) when called from
    inside a running asyncio event loop.
    """

    def __init__(self):
        # Bypass our own __setattr__, which writes keys to the backend.
        super().__setattr__("_backend", utils.import_module_attr(settings.BACKEND)())

    def _get_sync_value(self, key, default):
        """Blocking read of ``key``; persists ``default`` on first access."""
        result = self._backend.get(key)
        if result is None:
            result = default
            setattr(self, key, default)
        return result

    def __getattr__(self, key):
        if key in ("_backend",):
            return super().__getattribute__(key)
        try:
            # A config entry is (default, help[, field_type]).
            if len(settings.CONFIG[key]) not in (2, 3):
                raise AttributeError(key)
            default = settings.CONFIG[key][0]
        except KeyError as e:
            raise AttributeError(key) from e
        # FIX: the previous revision left the legacy synchronous lookup
        # in place above this point, returning early and making the
        # async-proxy branch unreachable; it is removed here.
        # The try only guards get_running_loop() so a RuntimeError from
        # proxy construction is never miscaught as "no loop".
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            return self._get_sync_value(key, default)
        return AsyncValueProxy(key, self, default)

    def __setattr__(self, key, value):
        if key == "_backend":
            return super().__setattr__(key, value)
        if key not in settings.CONFIG:
            raise AttributeError(key)
        self._backend.set(key, value)

    async def aset(self, key, value):
        """Asynchronously store ``value`` for ``key``.

        :raises AttributeError: if ``key`` is not a declared setting.
        """
        if key not in settings.CONFIG:
            raise AttributeError(key)
        await self._backend.aset(key, value)

    async def amget(self, keys):
        """Asynchronously fetch several keys at once.

        Backend values take precedence; declared defaults fill the gaps
        (mirroring utils.get_values_for_keys).
        """
        backend_values = await self._backend.amget(keys)
        default_initial = {name: settings.CONFIG[name][0] for name in keys if name in settings.CONFIG}
        return dict(default_initial, **backend_values)

    def __dir__(self):
        return settings.CONFIG.keys()
|
||||
|
|
|
|||
|
|
@ -22,6 +22,16 @@ def get_values():
|
|||
return dict(default_initial, **dict(config._backend.mget(settings.CONFIG)))
|
||||
|
||||
|
||||
async def aget_values():
    """Return the full configuration as a dict, asynchronously.

    :return: mapping of every declared key to its current value, with
        backend-stored values taking precedence over declared defaults.
    """
    defaults = {name: options[0] for name, options in settings.CONFIG.items()}
    stored = await config.amget(settings.CONFIG.keys())
    merged = dict(defaults)
    merged.update(stored)
    return merged
|
||||
|
||||
|
||||
def get_values_for_keys(keys):
|
||||
"""
|
||||
Retrieve values for specified keys from the backend.
|
||||
|
|
@ -43,3 +53,24 @@ def get_values_for_keys(keys):
|
|||
|
||||
# Merge default values and backend values, prioritizing backend values
|
||||
return dict(default_initial, **dict(config._backend.mget(keys)))
|
||||
|
||||
|
||||
async def aget_values_for_keys(keys):
    """Retrieve values for specified keys from the backend asynchronously.

    :param keys: list, tuple, or set of key names to retrieve.
    :return: dict mapping each key to its value (backend value wins over
        the declared default).
    :raises TypeError: if ``keys`` is not a list, tuple, or set.
    :raises AttributeError: if any key is not found in the configuration.
    """
    if not isinstance(keys, (list, tuple, set)):
        raise TypeError("keys must be a list, tuple, or set of strings")

    default_initial = {name: options[0] for name, options in settings.CONFIG.items() if name in keys}

    unknown = [key for key in keys if key not in default_initial]
    if unknown:
        raise AttributeError(f'"{", ".join(unknown)}" keys not found in configuration.')

    stored = await config.amget(keys)
    return dict(default_initial, **stored)
|
||||
|
|
|
|||
|
|
@ -292,6 +292,36 @@ object and accessing the variables with attribute lookups::
|
|||
if config.THE_ANSWER == 42:
|
||||
answer_the_question()
|
||||
|
||||
Asynchronous usage
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you are using Django's asynchronous features (like async views), you can ``await`` the settings directly on the standard ``config`` object::
|
||||
|
||||
from constance import config
|
||||
|
||||
async def my_async_view(request):
|
||||
# Accessing settings is awaitable
|
||||
if await config.THE_ANSWER == 42:
|
||||
return await answer_the_question_async()
|
||||
|
||||
async def update_settings():
|
||||
# Updating settings asynchronously
|
||||
await config.aset('THE_ANSWER', 43)
|
||||
|
||||
# Bulk retrieval is supported as well
|
||||
values = await config.amget(['THE_ANSWER', 'SITE_NAME'])
|
||||
|
||||
Performance and Safety
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
While synchronous access (e.g., ``config.THE_ANSWER``) still works inside async views for some backends, it is highly discouraged:
|
||||
|
||||
* **Blocking:** Synchronous access blocks the event loop, reducing the performance of your entire application.
|
||||
* **Safety Guards:** For the Database backend, Django's safety guards will raise a ``SynchronousOnlyOperation`` error if you attempt to access a setting synchronously from an async thread.
|
||||
* **Automatic Detection:** Constance will emit a ``RuntimeWarning`` if it detects synchronous access inside an asynchronous event loop, helping you identify and fix these performance bottlenecks.
|
||||
|
||||
For peak performance, especially with the Redis backend, always use the ``await`` syntax which leverages native asynchronous drivers.
|
||||
|
||||
Django templates
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
|
|
|
|||
|
|
@ -10,3 +10,16 @@ class Cache(BaseCache):
|
|||
self.set = self._cache.set
|
||||
self.get = self._cache.get
|
||||
self.clear = self._cache.clear
|
||||
self.set_many = self._cache.set_many
|
||||
self.get_many = self._cache.get_many
|
||||
self.delete_many = self._cache.delete_many
|
||||
|
||||
# Async methods for DatabaseBackend.aget() support
|
||||
async def aget(self, key, default=None, version=None):
    """Async shim over the synchronous get(), so DatabaseBackend.aget can await this test cache."""
    return self.get(key, default, version)
|
||||
|
||||
async def aget_many(self, keys, version=None):
    """Async shim over the synchronous get_many() for the test cache."""
    return self.get_many(keys, version)
|
||||
|
||||
async def aadd(self, key, value, timeout=None, version=None):
    """Async shim over the synchronous add() for the test cache."""
    return self.add(key, value, timeout, version)
|
||||
|
|
|
|||
|
|
@ -2,5 +2,31 @@ class Connection(dict):
|
|||
def set(self, key, value):
    """Store the value in this dict-backed fake redis connection."""
    self[key] = value
|
||||
|
||||
async def aset(self, key, value):
    # Coroutine facade over set(), matching the duck-typed async
    # surface the redis backend probes for with hasattr().
    self.set(key, value)
|
||||
|
||||
def get(self, key, default=None):
    # Explicit dict.get passthrough so subclasses (AsyncConnection)
    # can override it with a coroutine while reusing this via super().
    return super().get(key, default)
|
||||
|
||||
async def aget(self, key):
    # Coroutine facade over get(), matching the duck-typed async
    # surface the redis backend probes for with hasattr().
    return self.get(key)
|
||||
|
||||
def mget(self, keys):
    """Return the stored value (or None) for each key, in order — like redis MGET."""
    return [self.get(k) for k in keys]
|
||||
|
||||
async def amget(self, keys):
    # Coroutine facade over mget(), matching the duck-typed async
    # surface the redis backend probes for with hasattr().
    return self.mget(keys)
|
||||
|
||||
|
||||
class AsyncConnection(Connection):
    # Test double whose core operations are coroutines, mimicking the
    # surface of redis.asyncio's client (get/mget/set are awaitable).
    # The async-over-sync name shadowing is deliberate for the mock.

    async def set(self, key, value):
        super().set(key, value)

    async def get(self, key):
        return super().get(key)

    async def mget(self, keys):
        return super().mget(keys)
|
||||
|
|
|
|||
70
tests/test_async.py
Normal file
70
tests/test_async.py
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
import warnings
|
||||
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from constance import config
|
||||
from constance import utils
|
||||
|
||||
|
||||
class AsyncTestCase(TransactionTestCase):
    # End-to-end exercises of the awaitable config API.
    # TransactionTestCase is used so the async ORM paths run against a
    # real (rolled-back) database connection.

    async def test_async_get(self):
        # Accessing an attribute on config should be awaitable when in async context
        val = await config.INT_VALUE
        self.assertEqual(val, 1)

    async def test_async_set(self):
        await config.aset("INT_VALUE", 42)
        val = await config.INT_VALUE
        self.assertEqual(val, 42)

        # Verify sync access also works (and emits warning)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            sync_val = int(config.INT_VALUE)
            self.assertEqual(sync_val, 42)
            self.assertTrue(any("Synchronous access" in str(warn.message) for warn in w))

    async def test_amget(self):
        values = await config.amget(["INT_VALUE", "BOOL_VALUE"])
        self.assertEqual(values["INT_VALUE"], 1)
        self.assertEqual(values["BOOL_VALUE"], True)

    async def test_sync_math_in_async_loop(self):
        # Arithmetic on the proxy should work but emit the sync-access warning.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            res = config.INT_VALUE + 10
            # Note: res will be 42 + 10 if test_async_set ran before, or 1 + 10 if not.
            # TransactionTestCase should reset state, but let's be careful.
            # config.INT_VALUE defaults to 1.
            self.assertEqual(res, 11 if res < 50 else 52)
            self.assertTrue(any("Synchronous access" in str(warn.message) for warn in w))

    async def test_utils_aget_values(self):
        values = await utils.aget_values()
        self.assertIn("INT_VALUE", values)
        self.assertIn("BOOL_VALUE", values)
        self.assertEqual(values["INT_VALUE"], 1)

    async def test_utils_aget_values_for_keys(self):
        values = await utils.aget_values_for_keys(["INT_VALUE"])
        self.assertEqual(len(values), 1)
        self.assertEqual(values["INT_VALUE"], 1)

    async def test_bool_proxy(self):
        # BOOL_VALUE is True by default; __bool__ on the proxy must
        # resolve the underlying value.
        if config.BOOL_VALUE:
            self.assertTrue(True)
        else:
            self.fail("BOOL_VALUE should be True")

    async def test_int_proxy(self):
        await config.aset("INT_VALUE", 1)
        self.assertEqual(int(config.INT_VALUE), 1)

    async def test_container_proxy(self):
        # LIST_VALUE is [1, "1", date(2019, 1, 1)] by default; exercises
        # __getitem__, __len__, __contains__ and __iter__ on the proxy.
        self.assertEqual(config.LIST_VALUE[0], 1)
        self.assertEqual(len(config.LIST_VALUE), 3)
        self.assertIn(1, config.LIST_VALUE)
        self.assertEqual(list(config.LIST_VALUE)[0], 1)
|
||||
Loading…
Reference in a new issue