| author | bwarsaw | 2001-03-26 07:21:32 +0000 |
|---|---|---|
| committer | bwarsaw | 2001-03-26 07:21:32 +0000 |
| commit | 67ea49e9b7101010ae3576d9f5b6fd96e9d5798d (patch) | |
| tree | d7d62da525f0bb305fff0d7cb23d88bcd707a368 /Mailman/Pending.py | |
| parent | 0d4693e1d060d2742f6f45b232a07a5c5bdc5a75 (diff) | |
Rewritten and simplified. There's no need to make the logic live
inside a class; use the module as if it were a singleton instance.
The pending database is now stored in data/pending.db.
new(): Stores a new entry in pending.db; the cookie (key) is a SHA
hexdigest of the current time, a random number, and the content.
The timestamp stored with the entry is now the point in the future
at which the entry can be evicted.
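Concretely, the key derivation looks roughly like this (a minimal sketch of the hashing scheme described above, following the new new() implementation in the diff below; the content tuple is only illustrative):

```python
import sha
import time
import random

# Sketch of how new() derives its cookie: hash the current time, a random
# number, and the pending content into a hex digest, which becomes the key.
content = ('subscribe', 'user@example.com')      # illustrative pending data
hashfood = str(time.time()) + str(random.random()) + str(content)
cookie = sha.new(hashfood).hexdigest()           # 40-character hex string
```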
confirm(): Given a cookie, remove the entry from the database and
return its data (with the timestamp stripped off), or None if the
cookie is missing from the database.
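For example, the module-level API is meant to be used roughly like this (a sketch; the arguments passed to new() are illustrative, since callers decide what content to pend):

```python
from Mailman import Pending

# Pend the request; the entry lives in data/pending.db until it is
# confirmed or until its eviction timestamp passes.
cookie = Pending.new('subscribe', 'user@example.com')

# Later, when the user mails back the cookie: confirm() removes the entry
# and returns the original content tuple, or None for an unknown cookie.
data = Pending.confirm(cookie)
if data is None:
    print 'unknown or expired confirmation cookie'
```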
_load(), _save(): Rewritten and simplified low-level marshalling and
unmarshalling of the database. Both assume the lock is already acquired.
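In essence the database is just a marshalled dictionary mapping cookies to content tuples whose last element is the eviction time; a minimal sketch of the on-disk round trip (locking, stale-entry eviction, and the umask handling in the real _save() are omitted, and the sample entry is made up):

```python
import marshal

# db maps cookie (sha hexdigest) -> content tuple + (eviction timestamp,)
db = {'0a1b2c3d4e5f': ('subscribe', 'user@example.com', 985590092.0)}

fp = open('pending.db', 'wb')     # the real code writes data/pending.db
marshal.dump(db, fp)
fp.close()

fp = open('pending.db', 'rb')
db = marshal.load(fp)
fp.close()
```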
_update(): For use by the bin/update script to convert from
pending_subscriptions.db to pending.db (the old keys ought to be
compatible).
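A rough sketch of how bin/update could hand the old database to _update() (this conversion code is not part of this change; the loading and cleanup steps shown are assumptions):

```python
import os
import marshal

from Mailman import mm_cfg
from Mailman import Pending

# Hypothetical conversion step: read the old pending_subscriptions.db and
# fold its entries into the new data/pending.db via Pending._update().
oldfile = os.path.join(mm_cfg.DATA_DIR, 'pending_subscriptions.db')
if os.path.exists(oldfile):
    fp = open(oldfile, 'rb')
    olddb = marshal.load(fp)
    fp.close()
    Pending._update(olddb)   # also drops the old 'lastculltime' bookkeeping key
    os.unlink(oldfile)       # assumption: the obsolete database is removed
```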
Diffstat (limited to 'Mailman/Pending.py')
| -rw-r--r-- | Mailman/Pending.py | 207 |
1 file changed, 100 insertions, 107 deletions
diff --git a/Mailman/Pending.py b/Mailman/Pending.py
index 11944eb4d..96c74511f 100644
--- a/Mailman/Pending.py
+++ b/Mailman/Pending.py
@@ -1,4 +1,4 @@
-# Copyright (C) 1998,1999,2000 by the Free Software Foundation, Inc.
+# Copyright (C) 1998,1999,2000,2001 by the Free Software Foundation, Inc.
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU General Public License
@@ -16,121 +16,114 @@
 
 """Track pending confirmation of subscriptions.
 
-Pending().new(stuff...) places an item's data in the db, returning its cookie.
-Pending().confirmed(cookie) returns a tuple for the data, removing the item
+new(stuff...) places an item's data in the db, returning its cookie.
+
+confirmed(cookie) returns a tuple for the data, removing the item
 from the db.  It returns None if the cookie is not registered.
 """
 
-import os
-import marshal
+import os
 import time
-import whrandom
-import mm_cfg
-import LockFile
+import sha
+import marshal
+import random
+import errno
 
-DB_PATH = os.path.join(mm_cfg.DATA_DIR, "pending_subscriptions.db")
-LOCK_PATH = os.path.join(mm_cfg.LOCK_DIR, "pending_subscriptions.lock")
-PENDING_REQUEST_LIFE = mm_cfg.PENDING_REQUEST_LIFE
-# Something's probably wedged if we hit this.
-DB_LOCK_TIMEOUT = 30
-# Cull stale items from the db on save, after enough time since the last one:
-CULL_INTERVAL = (mm_cfg.PENDING_REQUEST_LIFE / 10)
+from Mailman import mm_cfg
+from Mailman import LockFile
 
-class Pending:
-    """Db interface for tracking pending confirmations, using random cookies.
+DBFILE = os.path.join(mm_cfg.DATA_DIR, 'pending.db')
+LOCKFILE = os.path.join(mm_cfg.LOCK_DIR, 'pending.lock')
 
-    .new(stuff...) places an item's data in the db, returning its cookie.
-    .confirmed(cookie) returns a tuple for the data, removing item from db.
-    The db is occasionally culled for stale items during saves."""
-    # The db is a marshalled dict with two kinds of entries; a bunch of:
-    #   cookie: (content..., timestamp)
-    # and just one:
-    #   LAST_CULL_KEY: next_cull_due_time
-    # Dbs lacking the LAST_CULL_KEY are culled, at which point the cull key
-    # is added.
-    LAST_CULL_KEY = "lastculltime"
-    def __init__(self,
-                 db_path = DB_PATH,
-                 lock_path = LOCK_PATH,
-                 item_life = PENDING_REQUEST_LIFE,
-                 cull_interval = CULL_INTERVAL,
-                 db_lock_timeout = DB_LOCK_TIMEOUT):
-        self.item_life = item_life
-        self.db_path = db_path
-        self.__lock = LockFile.LockFile(lock_path)
-        self.cull_interval = cull_interval
-        self.db_lock_timeout = db_lock_timeout
-    def new(self, *content):
-        """Create a new entry in the pending db, returning cookie for it."""
-        now = int(time.time())
-        db = self.__load()
-        # Generate cookie between 1e5 and 1e6 and not already in the db.
+
+def new(*content):
+    """Create a new entry in the pending database, returning cookie for it."""
+    # Acquire the pending database lock, letting TimeOutError percolate up.
+    lock = LockFile.LockFile(LOCKFILE)
+    lock.lock(timeout=30)
+    try:
+        # Load the current database
+        db = _load()
+        # Calculate a unique cookie
         while 1:
-            newcookie = int(whrandom.random() * 1e6)
-            if newcookie >= 1e5 and not db.has_key(newcookie):
+            n = random.random()
+            now = time.time()
+            hashfood = str(now) + str(n) + str(content)
+            cookie = sha.new(hashfood).hexdigest()
+            if not db.has_key(cookie):
                 break
-        db[newcookie] = content + (now,)  # Tack on timestamp.
-        self.__save(db)
-        return newcookie
-    def confirmed(self, cookie):
-        "Return entry for cookie, removing it from db, or None if not found."
-        content = None
-        got = None
-        db = self.__load()
-        try:
-            if db.has_key(cookie):
-                content = db[cookie][0:-1]  # Strip off timestamp.
-                got = 1
-                del db[cookie]
-        finally:
-            if got:
-                self.__save(db)
-            else:
-                self.__release_lock()
-        return content
-    def __load(self):
-        "Return db as dict, returning an empty one if db not yet existant."
-        self.__assert_lock(self.db_lock_timeout)
-        try:
-            fp = open(self.db_path,"r" )
-            return marshal.load(fp)
-        except IOError:
-            # Not yet existing Initialize a fresh one:
-            return {self.LAST_CULL_KEY: int(time.time())}
-    def __save(self, db):
-        """Marshal dict db to file - the exception is propagated on failure.
-        Cull stale items from the db, if that hasn't been done in a while."""
-        if not self.__lock.locked():
-            raise LockFile.NotLockedError
-        # Cull if its been a while (or if cull key is missing, ie, old
-        # version - which will be reformed to new format by cull).
-        if (db.get(self.LAST_CULL_KEY, 0)
-            < int(time.time()) - self.cull_interval):
-            self.__cull_db(db)
-        fp = open(self.db_path, "w")
-        marshal.dump(db, fp)
+        # Store the content, plus the time in the future when this entry will
+        # be evicted from the database, due to staleness.
+        db[cookie] = content + (now + mm_cfg.PENDING_REQUEST_LIFE,)
+        _save(db)
+        return cookie
+    finally:
+        lock.unlock()
+
+
+
+def confirm(cookie):
+    """Return data for cookie, removing it from db, or None if not found."""
+    # Acquire the pending database lock, letting TimeOutError percolate up.
+    lock = LockFile.LockFile(LOCKFILE)
+    lock.lock(timeout=30)
+    try:
+        # Load the database
+        db = _load()
+        missing = []
+        content = db.get(cookie, missing)
+        if content is missing:
+            return None
+        # Remove the entry from the database
+        del db[cookie]
+        _save(db)
+        # Strip off the timestamp and return the data
+        return content[:-1]
+    finally:
+        lock.unlock()
+
+
+
+def _load():
+    # Lock must be acquired.
+    try:
+        fp = open(DBFILE)
+        return marshal.load(fp)
+    except IOError, e:
+        if e.errno <> errno.ENOENT: raise
+        # No database yet, so initialize a fresh one
+        return {}
+
+
+def _save(db):
+    # Lock must be acquired.
+    now = time.time()
+    for cookie, data in db.items():
+        timestamp = data[-1]
+        if now > timestamp:
+            # The entry is stale, so remove it.
+            del db[cookie]
+    omask = os.umask(007)
+    try:
+        fp = open(DBFILE, 'w')
+        marshal.dump(db, fp)
         fp.close()
-        self.__release_lock()
-    def __assert_lock(self, timeout):
-        """Get the lock if not already acquired, or happily just keep it.
+    finally:
+        os.umask(omask)
+
 
-        Raises TimeOutError if unable to get lock within timeout."""
-        try:
-            self.__lock.lock(timeout)
-        except LockFile.AlreadyLockedError:
-            pass
-    def __release_lock(self):
-        self.__lock.unlock()
-    def __cull_db(self, db):
-        """Remove old items from db and revise last-culled timestamp."""
-        now = int(time.time())
-        too_old = now - self.item_life
-        cullkey = self.LAST_CULL_KEY
-        for k, v in db.items():
-            if k == cullkey:
-                continue
-            if v[-1] < too_old:
-                del db[k]
-        # Register time after which a new cull is due:
-        db[self.LAST_CULL_KEY] = now
+
+def _update(olddb):
+    # Update an old pending database to the new database
+    lock = LockFile.LockFile(LOCKFILE)
+    lock.lock(timeout=30)
+    try:
+        # We don't need this entry anymore
+        if olddb.has_key('lastculltime'):
+            del olddb['lastculltime']
+        db = _load()
+        db.update(olddb)
+        _save(db)
+    finally:
+        lock.unlock()
