4322: Upgrading a large database to a new version runs out of memory; increase the lock table size

svn: r17364
Michiel Nauta 2011-05-01 06:53:01 +00:00
parent f752fdcfcc
commit f9f932e568
2 changed files with 7 additions and 2 deletions


@@ -59,8 +59,8 @@ DBMODE_W = "w" # Full Read/Write access
 DBPAGE = 16384 # Size of the pages used to hold items in the database
 DBMODE = 0666 # Unix mode for database creation
 DBCACHE = 0x4000000 # Size of the shared memory buffer pool
-DBLOCKS = 25000 # Maximum number of locks supported
-DBOBJECTS = 25000 # Maximum number of simultaneously locked objects
+DBLOCKS = 100000 # Maximum number of locks supported
+DBOBJECTS = 100000 # Maximum number of simultaneously locked objects
 DBUNDO = 1000 # Maximum size of undo buffer
 import config
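
The constants above configure the Berkeley DB environment that backs a Gramps family tree; the change raises the lock-table ceilings because Berkeley DB reports an exhausted lock region as an out-of-memory error, which is what upgrading a large database ran into. As a rough illustration, not code from this commit, the sketch below shows how values such as DBCACHE, DBLOCKS and DBOBJECTS are typically applied to a DBEnv before it is opened; the open_env helper, its flag set and the env_dir argument are assumptions made for the example.

from bsddb3 import db   # "from bsddb import db" on Python 2 builds

DBCACHE   = 0x4000000   # shared memory buffer pool, as in dbconst above
DBLOCKS   = 100000      # new ceiling on concurrent locks
DBOBJECTS = 100000      # new ceiling on simultaneously locked objects

def open_env(env_dir):
    # Hypothetical helper, not taken from the Gramps sources: apply the
    # dbconst limits to a Berkeley DB environment before opening it.
    env = db.DBEnv()
    env.set_cachesize(0, DBCACHE)         # buffer pool size in bytes
    env.set_lk_max_locks(DBLOCKS)         # size of the lock table
    env.set_lk_max_objects(DBOBJECTS)     # size of the locked-object table
    env.open(env_dir,
             db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOCK |
             db.DB_INIT_LOG | db.DB_INIT_TXN)
    return env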


@@ -1029,6 +1029,11 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
 self.transaction_abort(self.transaction)
 self.env.txn_checkpoint()
+lockstats = self.env.lock_stat()
+_LOG.debug("lock occupancy: %d%%, locked object occupancy: %d%%" % (
+    round(lockstats['maxnlocks']*100/lockstats['maxlocks']),
+    round(lockstats['maxnobjects']*100/lockstats['maxobjects'])))
 self.__close_metadata()
 self.name_group.close()
 self.surnames.close()
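
The added logging reads the Berkeley DB lock statistics while the database is being closed: lock_stat() on a DBEnv returns a dict, and the 'maxnlocks'/'maxlocks' and 'maxnobjects'/'maxobjects' pairs compare peak usage against the configured ceilings, so occupancy close to 100% is the signal that DBLOCKS or DBOBJECTS needs to grow. A small standalone sketch of the same arithmetic, assuming env is an already opened environment such as the one returned by open_env above:

def report_lock_occupancy(env):
    # Same computation as the new _LOG.debug call: peak usage as a
    # percentage of the configured maximums.
    stats = env.lock_stat()
    lock_pct = round(stats['maxnlocks'] * 100 / stats['maxlocks'])
    obj_pct = round(stats['maxnobjects'] * 100 / stats['maxobjects'])
    print("lock occupancy: %d%%, locked object occupancy: %d%%"
          % (lock_pct, obj_pct))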