Hi all, in a (Flask) web application I often find that many identical (SQLAlchemy) queries are executed across subsequent requests. So I tried to cache the results of those queries at module level, like this:
@lru_cache()
def query_db(db, args):
    # do the "expensive" query
    return result

This obviously doesn't work, because each request uses a new database session, so the db argument always changes from one call to the next, triggering a new query against the database every time. But even if that weren't so, the function would keep returning the same value forever (unless it gets kicked out of the cache) and would not reflect the (infrequent) changes in the database. So what I need is a decorator that can be used like this:

@lru_ignore_first(timeout=10)
def query_db(db, args):
    # do the "expensive" query
    return result

This is what I came up with, and I'm quite happy with it so far. Questions: Am I being too clever? Is it too complicated? Am I overlooking something that will come back and bite me later? Thanks for any comments!

from functools import wraps, lru_cache
from time import time, sleep


def lru_ignore_first(timeout=0, **lru_args):

    class TimeCloak():
        '''All instances compare equal until timeout expires'''
        __slots__ = ('x', 't', 'timeout')

        def __init__(self, timeout):
            self.timeout = timeout
            self.t = 0
            self.x = None

        def __hash__(self):
            return self.t

        def __eq__(self, other):
            return self.t == other.t

        def update(self, x):
            self.x = x
            if self.timeout:
                t = int(time())
                if t >= self.t + self.timeout:
                    self.t = t

    cloak = TimeCloak(timeout)

    def decorator(func):

        @lru_cache(**lru_args)
        def worker(cloak, *a, **b):
            return func(cloak.x, *a, **b)

        @wraps(func)
        def wrapped(first, *a, **kw):
            cloak.update(first)
            return worker(cloak, *a, **kw)

        return wrapped

    return decorator


@lru_ignore_first(3)
def expensive(first, par):
    '''This takes a long time'''
    print('Expensive:', first, par)
    return par * 2


for i in range(10):
    r = expensive(i, 100)
    sleep(1)
    print(r)
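
For completeness, here is a rough sketch of how this would be used with SQLAlchemy. The table, names and query are made up for illustration only (in the real app each Flask request would supply its own session), and it uses a throwaway in-memory SQLite database so it can be run as-is together with the decorator above:

# Illustration only: placeholder schema and names, not the real application code.
from sqlalchemy import create_engine, text
from sqlalchemy.orm import Session

engine = create_engine('sqlite://')          # stand-in for the real database

with Session(engine) as setup:
    setup.execute(text('CREATE TABLE users (name TEXT, age INTEGER)'))
    setup.execute(text("INSERT INTO users VALUES ('alice', 30), ('bob', 12)"))
    setup.commit()

@lru_ignore_first(timeout=10)
def query_db(db, min_age):
    print('querying...')                     # only printed on a cache miss
    rows = db.execute(text('SELECT name FROM users WHERE age >= :a'),
                      {'a': min_age})
    return [r[0] for r in rows]              # fetch eagerly, before the session closes

for request in range(3):                     # simulate three quick requests
    with Session(engine) as db:              # fresh session every time, as in Flask
        print(query_db(db, 18))

Because the session is hidden from the cache key, the three calls hit the database only once ('querying...' appears a single time) even though each one passes a different session object; after the timeout expires, the next call queries again.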