This repository has been archived by the owner on Jun 11, 2019. It is now read-only.

Bug 957382: remove r/w locks on buildapi cache; r=catlee
djmitche committed Jan 22, 2014
1 parent 4b92057 commit 8cee8e4
Showing 2 changed files with 14 additions and 60 deletions.
buildapi/lib/cacher.py (50 changes: 4 additions & 46 deletions)
@@ -18,35 +18,16 @@ def get(self, key, func, args=None, kwargs=None, expire=0, lock_time=600):
if kwargs is None:
kwargs = {}

# note that in a "thundering herd" situation, this may generate the
# same cached value several times. That's OK.
try:
readlock = "readlocks:%s" % key
log.debug("Getting read lock %s", readlock)
self._getlock(readlock, expire=time.time()+lock_time)
log.debug("Got read lock %s", readlock)

try:
retval = self._get(key)
self._releaselock(readlock)
return retval
return self._get(key)
except KeyError:
pass
except:
self._releaselock(readlock)
raise

writelock = "writelocks:%s" % key
log.debug("Getting write lock %s", writelock)
self._getlock(writelock, expire=time.time()+lock_time)
log.debug("Got write lock %s", writelock)

try:
retval = func(*args, **kwargs)
self._put(key, retval, expire)
return retval
finally:
self._releaselock(readlock)
self._releaselock(writelock)
except:
except Exception:
log.exception("Problem with cache!")
return func(*args, **kwargs)

@@ -82,18 +63,6 @@ def _put(self, key, val, expire=0):
def has_key(self, key):
return self.r.exists(key)

def _getlock(self, key, expire):
if not hasattr(self.local, 'locks'):
self.local.locks = {}
assert key not in self.local.locks
l = redis.client.Lock(self.r, key, timeout=int(expire-time.time()))
l.acquire()
self.local.locks[key] = l

def _releaselock(self, key):
self.local.locks[key].release()
del self.local.locks[key]

except ImportError:
pass

@@ -121,16 +90,5 @@ def _put(self, key, val, expire=0):
def has_key(self, key):
return self.m.get(key) is not None

def _getlock(self, key, expire):
# try repeatedly to add the key, which will fail if the key
# already exists, until we are the one to add it
delay = 0.001
while not self.m.add(key, '', expire):
time.sleep(delay)
delay = min(delay * 1.1, 1)

def _releaselock(self, key):
self.m.delete(key)

except ImportError:
pass
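
For reference, the lock-free read path this commit leaves behind reduces to the pattern below. This is a minimal sketch using a plain dict as the backend instead of the real Redis/memcached cachers; the DictCache name and its _store attribute are illustrative only, not part of buildapi.

import logging

log = logging.getLogger(__name__)

class DictCache(object):
    """Toy in-memory backend standing in for the Redis/memcached cachers."""

    def __init__(self):
        self._store = {}

    def _get(self, key):
        # raises KeyError on a miss, following the cacher.py convention
        return self._store[key]

    def _put(self, key, val, expire=0):
        self._store[key] = val

    def get(self, key, func, args=None, kwargs=None, expire=0):
        args = args or []
        kwargs = kwargs or {}
        # Lock-free read: a miss simply falls through to the generator.
        # In a "thundering herd", several callers may compute the same value;
        # that duplication is the tradeoff this commit accepts.
        try:
            return self._get(key)
        except KeyError:
            pass
        try:
            retval = func(*args, **kwargs)
            self._put(key, retval, expire)
            return retval
        except Exception:
            # Any cache failure degrades to computing the value directly.
            log.exception("Problem with cache!")
            return func(*args, **kwargs)
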
buildapi/lib/test_cacher.py (24 changes: 10 additions & 14 deletions)
@@ -1,5 +1,4 @@
import threading
import time
import mock
from buildapi.lib import cacher
from unittest import TestCase
@@ -33,27 +32,24 @@ def test_has_key(self):
self.assertFalse(self.c.has_key('not-there'))
self.assertTrue(self.c.has_key('there'))

def test_race(self):
# this is to test a thundering-herd problem when a key is missing and
# multiple gets occur at the same time. It uses a "slow" function to
# generate the result, and then just throws a lot of threads at it.
# This will either fail "sometimes" or not at all, unfortunately.
# Testing for race conditions is hard, since they are inherently
# sometimes harmless.
calls = []
def generate(thd):
calls.append(thd)
time.sleep(0.1)
def test_parallel_calls(self):
# test that parallel calls to get in different cachers work and return
# appropriate results. Note that generate() may be called once or ten
# times - we don't care.
results = {}
def generate():
print "gen"
return 'result'
def get(thd):
c = self.newCache() if thd else self.c
c.get('not-there', generate, args=(thd,), lock_time=2)
results[thd] = c.get('not-there', generate, lock_time=2)
thds = [ threading.Thread(target=get, args=(i,))
for i in range(10) ]
for thd in thds:
thd.start()
for thd in thds:
thd.join()
self.assertEqual(len(calls), 1, calls)
self.assertEqual(results, dict((i, 'result') for i in range(10)))

# TODO: lists?

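To see the new test's approach outside the test harness, here is a small driver in the same spirit, reusing the illustrative DictCache sketch above: many threads race on a missing key, and only the returned values are checked, not how many times generate() ran.

import threading

# assumes the illustrative DictCache sketch defined earlier
cache = DictCache()
results = {}

def generate():
    return 'result'

def worker(i):
    # each thread does a plain get(); on a miss it may call generate() itself
    results[i] = cache.get('not-there', generate)

threads = [threading.Thread(target=worker, args=(i,)) for i in range(10)]
for t in threads:
    t.start()
for t in threads:
    t.join()

# every caller gets the value; how many times generate() ran is not asserted
assert results == dict((i, 'result') for i in range(10))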
