Small Python optimizations.

While keeping code compatible with Python 2.6 to 3.4, of course…
This commit is contained in:
Bertrand Bordage 2015-01-03 06:38:16 +01:00
parent ce9c1c4d55
commit 352fb2d829
3 changed files with 48 additions and 27 deletions

View file

@@ -53,11 +53,15 @@ def _get_result_or_execute_query(execute_query_func, cache_key,
cache = cachalot_caches.get_cache()
data = cache.get_many(table_cache_keys + [cache_key])
new_table_cache_keys = frozenset(table_cache_keys) - frozenset(data)
new_table_cache_keys = set(table_cache_keys)
new_table_cache_keys.difference_update(data)
if new_table_cache_keys:
now = time()
cache.set_many(dict([(k, now) for k in new_table_cache_keys]), None)
d = {}
for k in new_table_cache_keys:
d[k] = now
cache.set_many(d, None)
elif cache_key in data:
timestamp, result = data.pop(cache_key)
table_times = data.values()

View file

@@ -15,7 +15,10 @@ class AtomicCache(dict):
self[k] = v
def get_many(self, keys):
data = dict([(k, self[k]) for k in keys if k in self])
data = {}
for k in keys:
if k in self:
data[k] = self[k]
missing_keys = set(keys)
missing_keys.difference_update(data)
data.update(self.parent_cache.get_many(missing_keys))

View file

@@ -1,7 +1,7 @@
# coding: utf-8
from __future__ import unicode_literals
from hashlib import md5
from hashlib import sha1
from time import time
import django
@@ -15,8 +15,12 @@ else:
from .settings import cachalot_settings
def _hash_cache_key(unicode_key):
return md5(unicode_key.encode('utf-8')).hexdigest()
# The only cache backend with a key length limit is memcached (limited to
# 255 characters). However, we hash keys on other backends as well to avoid
# unnecessary huge communication between processes.
# We set the limit to something smaller than 255 because a prefix might be
# added by Django.
MAX_CACHE_KEY_LENGTH = 200
def get_query_cache_key(compiler):
@@ -30,10 +34,13 @@ def get_query_cache_key(compiler):
:arg compiler: A SQLCompiler that will generate the SQL query
:type compiler: django.db.models.sql.compiler.SQLCompiler
:return: A cache key
:rtype: str or unicode
:rtype: str
"""
sql, params = compiler.as_sql()
return _hash_cache_key('%s:%s:%s' % (compiler.using, sql, params))
cache_key = ('%s:%s:%s' % (compiler.using, sql, params)).encode('utf-8')
# We always hash queries since they are nearly always longer than
# ``MAX_CACHE_KEY_LENGTH``.
return sha1(cache_key).hexdigest()
def get_table_cache_key(db_alias, table):
@@ -45,9 +52,14 @@ def get_table_cache_key(db_alias, table):
:arg table: Name of the SQL table
:type table: str or unicode
:return: A cache key
:rtype: str or unicode
:rtype: str
"""
return _hash_cache_key('%s:%s' % (db_alias, table))
cache_key = ('%s:%s' % (db_alias, table)).encode('utf-8')
# We check if we have to hash the key since it should nearly never be
# necessary.
if len(cache_key) > MAX_CACHE_KEY_LENGTH:
return sha1(cache_key).hexdigest()
return cache_key
def _get_query_cache_key(compiler):
@@ -63,37 +75,39 @@ def _get_tables_from_sql(connection, lowercased_sql):
if t in lowercased_sql]
def _get_tables(compiler):
def _get_table_cache_keys(compiler):
"""
Returns a ``set`` of all SQL table names used by ``compiler``.
Returns a ``list`` of cache keys for all the SQL tables used
by ``compiler``.
:arg compiler: A SQLCompiler that will generate the SQL query
:type compiler: django.db.models.sql.compiler.SQLCompiler
:return: All the SQL table names
:rtype: set
:return: Cache keys for the SQL tables used
:rtype: list
"""
query = compiler.query
using = compiler.using
tables = set(query.tables)
tables.add(query.model._meta.db_table)
if query.extra_select or any(isinstance(c, ExtraWhere)
for c in query.where.children):
sql, params = compiler.as_sql()
connection = connections[compiler.using]
full_sql = (sql % params)
tables.update(_get_tables_from_sql(connection, full_sql))
return tables
def _get_table_cache_keys(compiler):
using = compiler.using
return [_get_table_cache_key(using, t) for t in _get_tables(compiler)]
if query.extra_select or any([isinstance(c, ExtraWhere)
for c in query.where.children]):
sql = compiler.as_sql()[0].lower()
connection = connections[using]
additional_tables = _get_tables_from_sql(connection, sql)
tables.update(additional_tables)
return [_get_table_cache_key(using, t) for t in tables]
def _invalidate_table_cache_keys(cache, table_cache_keys):
if hasattr(cache, 'to_be_invalidated'):
cache.to_be_invalidated.update(table_cache_keys)
now = time()
cache.set_many(dict((k, now) for k in table_cache_keys), None)
d = {}
for k in table_cache_keys:
d[k] = now
cache.set_many(d, None)
def _invalidate_tables(cache, compiler):