Merge branch 'master' of github.com:gramps-project/gramps
Danish translation submited in da.po
This commit is contained in:
commit
5578a987bc
@ -52,14 +52,9 @@ from gramps.gen.db.utils import make_database
|
||||
from gramps.gen.errors import DbError
|
||||
from gramps.gen.dbstate import DbState
|
||||
from gramps.gen.db.exceptions import (DbUpgradeRequiredError,
|
||||
BsddbDowngradeError,
|
||||
DbSupportedError,
|
||||
DbVersionError,
|
||||
DbPythonError,
|
||||
DbEnvironmentError,
|
||||
BsddbUpgradeRequiredError,
|
||||
BsddbDowngradeRequiredError,
|
||||
PythonUpgradeRequiredError,
|
||||
PythonDowngradeError,
|
||||
DbConnectionError)
|
||||
from gramps.gen.plug import BasePluginManager
|
||||
from gramps.gen.utils.config import get_researcher
|
||||
@ -176,34 +171,8 @@ class CLIDbLoader:
|
||||
try:
|
||||
self.dbstate.db.load(filename, self._pulse_progress, mode,
|
||||
username=username, password=password)
|
||||
except DbEnvironmentError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except BsddbUpgradeRequiredError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except BsddbDowngradeRequiredError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except BsddbDowngradeError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except DbUpgradeRequiredError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except PythonDowngradeError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except PythonUpgradeRequiredError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except DbVersionError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except DbPythonError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except DbConnectionError as msg:
|
||||
except (DbConnectionError, DbSupportedError, DbUpgradeRequiredError,
|
||||
DbVersionError, DbPythonError, DbConnectionError) as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(_("Cannot open database"), str(msg))
|
||||
except OSError as msg:
|
||||
|
@ -369,3 +369,5 @@ if not os.path.exists(CONFIGMAN.filename):
|
||||
CONFIGMAN.load()
|
||||
|
||||
config = CONFIGMAN
|
||||
if config.get('database.backend') == 'bsddb':
|
||||
config.set('database.backend', 'sqlite')
|
||||
|
@ -122,121 +122,6 @@ class DbPythonError(Exception):
|
||||
'min_vers': self.min_vers,
|
||||
'max_vers': self.max_vers}
|
||||
|
||||
class BsddbDowngradeError(Exception):
|
||||
"""
|
||||
Error used to report that the Berkeley database used to create the family
|
||||
tree is of a version that is too new to be supported by the current version.
|
||||
"""
|
||||
def __init__(self, env_version, bdb_version):
|
||||
Exception.__init__(self)
|
||||
self.env_version = str(env_version)
|
||||
self.bdb_version = str(bdb_version)
|
||||
|
||||
def __str__(self):
|
||||
return _('The Family Tree you are trying to load is in the Bsddb '
|
||||
'version %(env_version)s format. This version of Gramps uses '
|
||||
'Bsddb version %(bdb_version)s. So you are trying to load '
|
||||
'data created in a newer format into an older program, and '
|
||||
'this is bound to fail.\n\n'
|
||||
'You should start your '
|
||||
'%(bold_start)snewer%(bold_end)s version of Gramps and '
|
||||
'%(wiki_backup_html_start)smake a backup%(html_end)s '
|
||||
'of your Family Tree. You can then import '
|
||||
'this backup into this version of Gramps.') % {
|
||||
'wiki_backup_html_start' : URL_BACKUP1_START ,
|
||||
'html_end' : '</a>' ,
|
||||
'bold_start' : '<b>' ,
|
||||
'bold_end' : '</b>' ,
|
||||
'env_version' : self.env_version,
|
||||
'bdb_version' : self.bdb_version }
|
||||
|
||||
class BsddbDowngradeRequiredError(Exception):
|
||||
"""
|
||||
Error used to report that the Berkeley database used to create the family
|
||||
tree is of a version that is newer than the current version, but it may be
|
||||
possible to open the tree, because the difference is only a point upgrade
|
||||
(i.e. a difference in the last digit of the version tuple).
|
||||
"""
|
||||
def __init__(self, env_version, bdb_version):
|
||||
Exception.__init__(self)
|
||||
self.env_version = str(env_version)
|
||||
self.bdb_version = str(bdb_version)
|
||||
|
||||
def __str__(self):
|
||||
return _('The Family Tree you are trying to load is in the Bsddb '
|
||||
'version %(env_version)s format. This version of Gramps uses '
|
||||
'Bsddb version %(bdb_version)s. So you are trying to load '
|
||||
'data created in a newer format into an older program. In '
|
||||
'this particular case, the difference is very small, so it '
|
||||
'may work.\n\n'
|
||||
'If you have not already made a backup of your Family Tree, '
|
||||
'then you should start your '
|
||||
'%(bold_start)snewer%(bold_end)s version of Gramps and '
|
||||
'%(wiki_backup_html_start)smake a backup%(html_end)s '
|
||||
'of your Family Tree.') % {
|
||||
'wiki_backup_html_start' : URL_BACKUP1_START ,
|
||||
'html_end' : '</a>' ,
|
||||
'bold_start' : '<b>' ,
|
||||
'bold_end' : '</b>' ,
|
||||
'env_version' : self.env_version,
|
||||
'bdb_version' : self.bdb_version }
|
||||
|
||||
class BsddbUpgradeRequiredError(Exception):
|
||||
"""
|
||||
Error used to report that the Berkeley database used to create the family
|
||||
tree is of a version that is too new to be supported by the current version.
|
||||
"""
|
||||
def __init__(self, env_version, bsddb_version):
|
||||
Exception.__init__(self)
|
||||
self.env_version = str(env_version)
|
||||
self.bsddb_version = str(bsddb_version)
|
||||
|
||||
def __str__(self):
|
||||
return _('The Family Tree you are trying to load is in the Bsddb '
|
||||
'version %(env_version)s format. This version of Gramps uses '
|
||||
'Bsddb version %(bdb_version)s. Therefore you cannot load '
|
||||
'this Family Tree without upgrading the Bsddb version of the '
|
||||
'Family Tree.\n\n'
|
||||
'Opening the Family Tree with this version of Gramps might '
|
||||
'irretrievably corrupt your Family Tree. You are strongly '
|
||||
'advised to backup your Family Tree.\n\n'
|
||||
'If you have not already made a backup of your Family Tree, '
|
||||
'then you should start your %(bold_start)sold%(bold_end)s '
|
||||
'version of Gramps and '
|
||||
'%(wiki_backup_html_start)smake a backup%(html_end)s '
|
||||
'of your Family Tree.') % {
|
||||
'wiki_backup_html_start' : URL_BACKUP1_START ,
|
||||
'html_end' : '</a>' ,
|
||||
'bold_start' : '<b>' ,
|
||||
'bold_end' : '</b>' ,
|
||||
'env_version' : self.env_version,
|
||||
'bdb_version' : self.bsddb_version }
|
||||
|
||||
class DbEnvironmentError(Exception):
|
||||
"""
|
||||
Error used to report that the database 'environment' could not be opened.
|
||||
Most likely, the database was created by a different version of the underlying database engine.
|
||||
"""
|
||||
def __init__(self, msg):
|
||||
Exception.__init__(self)
|
||||
self.msg = msg
|
||||
|
||||
def __str__(self):
|
||||
return (_("Gramps has detected a problem in opening the 'environment' "
|
||||
"of the underlying Berkeley database used to store this "
|
||||
"Family Tree. The most likely cause "
|
||||
"is that the database was created with an old version "
|
||||
"of the Berkeley database program, "
|
||||
"and you are now using a new version. "
|
||||
"It is quite likely that your database has not been "
|
||||
"changed by Gramps.\nIf possible, you should revert to your "
|
||||
"old version of Gramps and its support software; export "
|
||||
"your database to XML; close the database; "
|
||||
"then upgrade again "
|
||||
"to this version of Gramps and import the XML file "
|
||||
"in an empty Family Tree. Alternatively, it may be possible "
|
||||
"to use the Berkeley database recovery tools.")
|
||||
+ '\n\n' + str(self.msg))
|
||||
|
||||
class DbUpgradeRequiredError(Exception):
|
||||
"""
|
||||
@ -275,73 +160,6 @@ class DbUpgradeRequiredError(Exception):
|
||||
'oldschema' : self.oldschema,
|
||||
'newschema' : self.newschema }
|
||||
|
||||
class PythonDowngradeError(Exception):
|
||||
"""
|
||||
Error used to report that the Python version used to create the family tree
|
||||
(i.e. Python3) is a version that is newer than the current version
|
||||
(i.e. Python2), so the Family Tree cannot be opened
|
||||
"""
|
||||
def __init__(self, db_python_version, current_python_version):
|
||||
Exception.__init__(self)
|
||||
self.db_python_version = str(db_python_version)
|
||||
self.current_python_version = str(current_python_version)
|
||||
|
||||
def __str__(self):
|
||||
return _('The Family Tree you are trying to load was created with '
|
||||
'Python version %(db_python_version)s. This version of Gramps '
|
||||
'uses Python version %(current_python_version)s. So you are '
|
||||
'trying to load '
|
||||
'data created in a newer format into an older program, and '
|
||||
'this is bound to fail.\n\n'
|
||||
'You should start your '
|
||||
'%(bold_start)snewer%(bold_end)s version of Gramps and '
|
||||
'%(wiki_backup_html_start)smake a backup%(html_end)s '
|
||||
'of your Family Tree. You can then import '
|
||||
'this backup into this version of Gramps.') % {
|
||||
'wiki_backup_html_start' : URL_BACKUP1_START ,
|
||||
'html_end' : '</a>' ,
|
||||
'bold_start' : '<b>' ,
|
||||
'bold_end' : '</b>' ,
|
||||
'db_python_version': self.db_python_version,
|
||||
'current_python_version': self.current_python_version }
|
||||
|
||||
class PythonUpgradeRequiredError(Exception):
|
||||
"""
|
||||
Error used to report that the Python version used to create the family tree
|
||||
(i.e. Python2) is earlier than the current Python version (i.e. Python3), so
|
||||
the Family Tree needs to be upgraded.
|
||||
"""
|
||||
def __init__(self, db_python_version, current_python_version):
|
||||
Exception.__init__(self)
|
||||
self.db_python_version = str(db_python_version)
|
||||
self.current_python_version = str(current_python_version)
|
||||
|
||||
def __str__(self):
|
||||
return _('The Family Tree you are trying to load is in the Python '
|
||||
'version %(db_python_version)s format. This version of Gramps '
|
||||
'uses Python version %(current_python_version)s. Therefore '
|
||||
'you cannot load this Family Tree without upgrading the '
|
||||
'Python version of the Family Tree.\n\n'
|
||||
'If you upgrade then you won\'t be able to use the previous '
|
||||
'version of Gramps, even if you subsequently '
|
||||
'%(wiki_manual_backup_html_start)sbackup%(html_end)s or '
|
||||
'%(wiki_manual_export_html_start)sexport%(html_end)s '
|
||||
'your upgraded Family Tree.\n\n'
|
||||
'Upgrading is a difficult task which could irretrievably '
|
||||
'corrupt your Family Tree if it is interrupted or fails.\n\n'
|
||||
'If you have not already made a backup of your Family Tree, '
|
||||
'then you should start your %(bold_start)sold%(bold_end)s '
|
||||
'version of Gramps and '
|
||||
'%(wiki_backup_html_start)smake a backup%(html_end)s '
|
||||
'of your Family Tree.') % {
|
||||
'wiki_backup_html_start' : URL_BACKUP1_START ,
|
||||
'wiki_manual_backup_html_start' : URL_BACKUP2_START ,
|
||||
'wiki_manual_export_html_start' : URL_EXPORT_START ,
|
||||
'html_end' : '</a>' ,
|
||||
'bold_start' : '<b>' ,
|
||||
'bold_end' : '</b>' ,
|
||||
'db_python_version': self.db_python_version,
|
||||
'current_python_version': self.current_python_version }
|
||||
|
||||
class DbConnectionError(Exception):
|
||||
"""
|
||||
@ -360,6 +178,35 @@ class DbConnectionError(Exception):
|
||||
'message': self.msg,
|
||||
'settings_file': self.settings_file}
|
||||
|
||||
|
||||
class DbSupportedError(Exception):
|
||||
"""
|
||||
Error used to report that a database is no longer supported.
|
||||
"""
|
||||
def __init__(self, msg):
|
||||
Exception.__init__(self)
|
||||
self.msg = msg
|
||||
|
||||
def __str__(self):
|
||||
return _('The Family Tree you are trying to load is in the %(dbtype)s '
|
||||
'database, which is no longer supported.\nTherefore you '
|
||||
'cannot load this Family Tree without upgrading.\n\n'
|
||||
'If you upgrade then you won\'t be able to use the previous '
|
||||
'version of Gramps, even if you subsequently '
|
||||
'%(wiki_manual_backup_html_start)sbackup%(html_end)s or '
|
||||
'%(wiki_manual_export_html_start)sexport%(html_end)s '
|
||||
'your upgraded Family Tree.\n\n'
|
||||
'You are strongly advised to backup your Family Tree.\n\n'
|
||||
'If you have not already made a backup of your Family Tree, '
|
||||
'then you should start your previous version of Gramps and '
|
||||
'%(wiki_backup_html_start)smake a backup%(html_end)s '
|
||||
'of your Family Tree.') % {
|
||||
'dbtype' : self.msg,
|
||||
'wiki_manual_backup_html_start' : URL_BACKUP2_START ,
|
||||
'wiki_manual_export_html_start' : URL_EXPORT_START ,
|
||||
'wiki_backup_html_start' : URL_BACKUP1_START ,
|
||||
'html_end' : '</a>'}
|
||||
|
||||
if __name__ == "__main__":
|
||||
"""
|
||||
Call this from the CLI (in order to find the imported modules):
|
||||
@ -370,20 +217,8 @@ if __name__ == "__main__":
|
||||
|
||||
print("\nDbVersionError:\n",
|
||||
DbVersionError('1.6.0', '1.5.0', '1.5.1'))
|
||||
print("\nBsddbDowngradeError:\n",
|
||||
BsddbDowngradeError('4.8.30', '4.8.29'))
|
||||
print("\nBsddbDowngradeRequiredError:\n",
|
||||
BsddbDowngradeRequiredError('4.8.30', '4.8.29'))
|
||||
print("\nBsddbUpgradeRequiredError:\n",
|
||||
BsddbUpgradeRequiredError('4.8.29', '4.8.30'))
|
||||
print("\nDbEnvironmentError:\n",
|
||||
DbEnvironmentError('test message'))
|
||||
print("\nDbUpgradeRequiredError:\n",
|
||||
DbUpgradeRequiredError('1.5.1', '1.6.0'))
|
||||
print("\nPythonDowngradeError:\n",
|
||||
PythonDowngradeError('3', '2'))
|
||||
print("\nPythonUpgradeRequiredError:\n",
|
||||
PythonUpgradeRequiredError('2', '3'))
|
||||
sys.exit(0)
|
||||
|
||||
print("\nxxx:\n",
|
||||
|
@ -47,6 +47,7 @@ from . import (DbReadBase, DbWriteBase, DbUndo, DBLOGNAME, DBUNDOFN,
|
||||
REPOSITORY_KEY, NOTE_KEY, TAG_KEY, TXNADD, TXNUPD, TXNDEL,
|
||||
KEY_TO_NAME_MAP, DBMODE_R, DBMODE_W)
|
||||
from .utils import write_lock_file, clear_lock_file
|
||||
from .exceptions import DbVersionError, DbUpgradeRequiredError
|
||||
from ..errors import HandleError
|
||||
from ..utils.callback import Callback
|
||||
from ..updatecallback import UpdateCallback
|
||||
@ -311,7 +312,7 @@ class DbGeneric(DbWriteBase, DbReadBase, UpdateCallback, Callback):
|
||||
|
||||
__callback_map = {}
|
||||
|
||||
VERSION = (18, 0, 0)
|
||||
VERSION = (20, 0, 0)
|
||||
|
||||
def __init__(self, directory=None):
|
||||
DbReadBase.__init__(self)
|
||||
@ -659,6 +660,21 @@ class DbGeneric(DbWriteBase, DbReadBase, UpdateCallback, Callback):
|
||||
|
||||
self.db_is_open = True
|
||||
|
||||
# Check on db version to see if we need upgrade or too new
|
||||
dbversion = int(self._get_metadata('version', default='0'))
|
||||
if dbversion > self.VERSION[0]:
|
||||
self.close()
|
||||
raise DbVersionError(dbversion, 18, self.VERSION[0])
|
||||
|
||||
if not self.readonly and dbversion < self.VERSION[0]:
|
||||
LOG.debug("Schema upgrade required from %s to %s",
|
||||
dbversion, self.VERSION[0])
|
||||
if force_schema_upgrade:
|
||||
self._gramps_upgrade(dbversion, directory, callback)
|
||||
else:
|
||||
self.close()
|
||||
raise DbUpgradeRequiredError(dbversion, self.VERSION[0])
|
||||
|
||||
def _close(self):
|
||||
"""
|
||||
Close database backend.
|
||||
@ -2463,3 +2479,47 @@ class DbGeneric(DbWriteBase, DbReadBase, UpdateCallback, Callback):
|
||||
enclosed_by = placeref.ref
|
||||
break
|
||||
return enclosed_by
|
||||
|
||||
def _gramps_upgrade(self, version, directory, callback=None):
|
||||
"""
|
||||
Here we do the calls for stepwise schema upgrades.
|
||||
We assume that we need to rebuild secondary and reference maps.
|
||||
"""
|
||||
UpdateCallback.__init__(self, callback)
|
||||
|
||||
start = time.time()
|
||||
|
||||
from gramps.gen.db.upgrade import (
|
||||
gramps_upgrade_14, gramps_upgrade_15, gramps_upgrade_16,
|
||||
gramps_upgrade_17, gramps_upgrade_18, gramps_upgrade_19,
|
||||
gramps_upgrade_20)
|
||||
|
||||
if version < 14:
|
||||
gramps_upgrade_14(self)
|
||||
if version < 15:
|
||||
gramps_upgrade_15(self)
|
||||
if version < 16:
|
||||
gramps_upgrade_16(self)
|
||||
if version < 17:
|
||||
gramps_upgrade_17(self)
|
||||
if version < 18:
|
||||
gramps_upgrade_18(self)
|
||||
if version < 19:
|
||||
gramps_upgrade_19(self)
|
||||
if version < 20:
|
||||
gramps_upgrade_20(self)
|
||||
|
||||
self.rebuild_secondary(callback)
|
||||
self.reindex_reference_map(callback)
|
||||
self.reset()
|
||||
|
||||
self.set_schema_version(self.VERSION[0])
|
||||
LOG.debug("Upgrade time: %d seconds" % int(time.time() - start))
|
||||
|
||||
def get_schema_version(self):
|
||||
""" Return current schema version as an int """
|
||||
return int(self._get_metadata('version', default='0'))
|
||||
|
||||
def set_schema_version(self, value):
|
||||
""" set the current schema version """
|
||||
self._set_metadata('version', str(value))
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -58,7 +58,7 @@ class FileTest(unittest.TestCase):
|
||||
"""
|
||||
|
||||
# Create database
|
||||
db = make_database("bsddb")
|
||||
db = make_database("sqlite")
|
||||
path = get_empty_tempdir("utils_file_test")
|
||||
db.load(path)
|
||||
|
||||
|
@ -1809,6 +1809,8 @@ class GrampsPreferences(ConfigureDialog):
|
||||
for plugin in sorted(backend_plugins, key=lambda plugin: plugin.name):
|
||||
if plugin.id == default:
|
||||
active = count
|
||||
if plugin.id == 'bsddb':
|
||||
continue # bsddb is deprecated, so don't allow setting
|
||||
model.append(row=[count, plugin.name, plugin.id])
|
||||
count += 1
|
||||
obox.set_model(model)
|
||||
|
@ -54,18 +54,14 @@ from gi.repository import GObject
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
from gramps.gen.db.dbconst import DBBACKEND
|
||||
from gramps.gen.db.utils import make_database
|
||||
from gramps.gen.db.upgrade import make_zip_backup
|
||||
_ = glocale.translation.gettext
|
||||
from gramps.cli.grampscli import CLIDbLoader
|
||||
from gramps.gen.config import config
|
||||
from gramps.gen.db.exceptions import (DbUpgradeRequiredError,
|
||||
BsddbDowngradeError,
|
||||
DbVersionError,
|
||||
DbPythonError,
|
||||
DbEnvironmentError,
|
||||
BsddbUpgradeRequiredError,
|
||||
BsddbDowngradeRequiredError,
|
||||
PythonUpgradeRequiredError,
|
||||
PythonDowngradeError,
|
||||
DbSupportedError,
|
||||
DbConnectionError)
|
||||
from .pluginmanager import GuiPluginManager
|
||||
from .dialog import (DBErrorDialog, ErrorDialog, QuestionDialog2,
|
||||
@ -163,130 +159,70 @@ class DbLoader(CLIDbLoader):
|
||||
if not os.access(filename, os.W_OK):
|
||||
mode = "r"
|
||||
self._warn(_('Read only database'),
|
||||
_('You do not have write access '
|
||||
'to the selected file.'))
|
||||
_('You do not have write access '
|
||||
'to the selected file.'))
|
||||
else:
|
||||
mode = "w"
|
||||
else:
|
||||
mode = 'w'
|
||||
|
||||
dbid_path = os.path.join(filename, DBBACKEND)
|
||||
if os.path.isfile(dbid_path):
|
||||
with open(dbid_path) as fp:
|
||||
dbid = fp.read().strip()
|
||||
else:
|
||||
dbid = "bsddb"
|
||||
|
||||
db = make_database(dbid)
|
||||
db.disable_signals()
|
||||
self.dbstate.no_database()
|
||||
|
||||
if db.requires_login() and username is None:
|
||||
login = GrampsLoginDialog(self.uistate)
|
||||
credentials = login.run()
|
||||
if credentials is None:
|
||||
return
|
||||
username, password = credentials
|
||||
|
||||
self._begin_progress()
|
||||
self.uistate.progress.show()
|
||||
self.uistate.pulse_progressbar(0)
|
||||
|
||||
force_schema_upgrade = False
|
||||
force_bsddb_upgrade = False
|
||||
force_bsddb_downgrade = False
|
||||
force_python_upgrade = False
|
||||
try:
|
||||
while True:
|
||||
dbid_path = os.path.join(filename, DBBACKEND)
|
||||
if os.path.isfile(dbid_path):
|
||||
with open(dbid_path) as fp:
|
||||
dbid = fp.read().strip()
|
||||
else:
|
||||
dbid = "bsddb"
|
||||
|
||||
db = make_database(dbid)
|
||||
db.disable_signals()
|
||||
if db.requires_login() and username is None:
|
||||
login = GrampsLoginDialog(self.uistate)
|
||||
credentials = login.run()
|
||||
if credentials is None:
|
||||
return
|
||||
username, password = credentials
|
||||
|
||||
try:
|
||||
db.load(filename, self._pulse_progress,
|
||||
db.load(filename, self.uistate.pulse_progressbar,
|
||||
mode, force_schema_upgrade,
|
||||
force_bsddb_upgrade,
|
||||
force_bsddb_downgrade,
|
||||
force_python_upgrade,
|
||||
username=username,
|
||||
password=password)
|
||||
if self.dbstate.is_open():
|
||||
self.dbstate.db.close(
|
||||
user=User(callback=self._pulse_progress,
|
||||
user=User(callback=self.uistate.pulse_progressbar,
|
||||
uistate=self.uistate,
|
||||
dbstate=self.dbstate))
|
||||
self.dbstate.change_database(db)
|
||||
break
|
||||
except DbUpgradeRequiredError as msg:
|
||||
if QuestionDialog2(_("Are you sure you want "
|
||||
except (DbSupportedError, DbUpgradeRequiredError) as msg:
|
||||
if(force_schema_upgrade or
|
||||
QuestionDialog2(_("Are you sure you want "
|
||||
"to upgrade this Family Tree?"),
|
||||
str(msg),
|
||||
_("I have made a backup,\n"
|
||||
"please upgrade my Family Tree"),
|
||||
_("Cancel"),
|
||||
parent=self.uistate.window).run():
|
||||
parent=self.uistate.window).run()):
|
||||
force_schema_upgrade = True
|
||||
force_bsddb_upgrade = False
|
||||
force_bsddb_downgrade = False
|
||||
force_python_upgrade = False
|
||||
else:
|
||||
self.dbstate.no_database()
|
||||
break
|
||||
except BsddbUpgradeRequiredError as msg:
|
||||
if QuestionDialog2(_("Are you sure you want "
|
||||
"to upgrade this Family Tree?"),
|
||||
str(msg),
|
||||
_("I have made a backup,\n"
|
||||
"please upgrade my Family Tree"),
|
||||
_("Cancel"),
|
||||
parent=self.uistate.window).run():
|
||||
force_schema_upgrade = False
|
||||
force_bsddb_upgrade = True
|
||||
force_bsddb_downgrade = False
|
||||
force_python_upgrade = False
|
||||
else:
|
||||
self.dbstate.no_database()
|
||||
break
|
||||
except BsddbDowngradeRequiredError as msg:
|
||||
if QuestionDialog2(_("Are you sure you want "
|
||||
"to downgrade this Family Tree?"),
|
||||
str(msg),
|
||||
_("I have made a backup,\n"
|
||||
"please downgrade my Family Tree"),
|
||||
_("Cancel"),
|
||||
parent=self.uistate.window).run():
|
||||
force_schema_upgrade = False
|
||||
force_bsddb_upgrade = False
|
||||
force_bsddb_downgrade = True
|
||||
force_python_upgrade = False
|
||||
else:
|
||||
self.dbstate.no_database()
|
||||
break
|
||||
except PythonUpgradeRequiredError as msg:
|
||||
if QuestionDialog2(_("Are you sure you want "
|
||||
"to upgrade this Family Tree?"),
|
||||
str(msg),
|
||||
_("I have made a backup,\n"
|
||||
"please upgrade my Family Tree"),
|
||||
_("Cancel"),
|
||||
parent=self.uistate.window).run():
|
||||
force_schema_upgrade = False
|
||||
force_bsddb_upgrade = False
|
||||
force_bsddb_downgrade = False
|
||||
force_python_upgrade = True
|
||||
make_zip_backup(filename)
|
||||
else:
|
||||
self.dbstate.no_database()
|
||||
break
|
||||
# Get here is there is an exception the while loop does not handle
|
||||
except BsddbDowngradeError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._warn( _("Cannot open database"), str(msg))
|
||||
except DbVersionError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog( _("Cannot open database"), str(msg))
|
||||
except DbPythonError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog( _("Cannot open database"), str(msg))
|
||||
except DbEnvironmentError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog( _("Cannot open database"), str(msg))
|
||||
except PythonDowngradeError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._warn( _("Cannot open database"), str(msg))
|
||||
except DbConnectionError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._warn(_("Cannot open database"), str(msg))
|
||||
@ -300,7 +236,8 @@ class DbLoader(CLIDbLoader):
|
||||
except Exception as newerror:
|
||||
self.dbstate.no_database()
|
||||
self._dberrordialog(str(newerror))
|
||||
self._end_progress()
|
||||
|
||||
self.uistate.progress.hide()
|
||||
return True
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
|
@ -367,7 +367,7 @@ class DbManager(CLIDbManager, ManagedWindow):
|
||||
dbid = config.get('database.backend')
|
||||
backend_type = self.get_backend_name_from_dbid(dbid)
|
||||
if backend_type == UNAVAILABLE:
|
||||
dbid = 'bsddb'
|
||||
dbid = 'sqlite'
|
||||
config.set('database.backend', dbid)
|
||||
backend_type = self.get_backend_name_from_dbid(dbid)
|
||||
self.new_btn.set_tooltip_text(backend_type)
|
||||
@ -938,7 +938,7 @@ class DbManager(CLIDbManager, ManagedWindow):
|
||||
fname = os.path.join(dirname, filename)
|
||||
os.unlink(fname)
|
||||
|
||||
dbase = make_database("bsddb")
|
||||
dbase = make_database("sqlite")
|
||||
dbase.load(dirname, None)
|
||||
|
||||
self.__start_cursor(_("Rebuilding database from backup files"))
|
||||
|
@ -37,6 +37,7 @@ from gramps.gen.lib import (Person, Family, Event, Source, Place, Citation,
|
||||
from gramps.cli.user import User
|
||||
from gramps.gen.dbstate import DbState
|
||||
from gramps.gen.db.utils import make_database
|
||||
from gramps.gen.db import DbTxn
|
||||
from gramps.gui.editors.editreference import EditReference
|
||||
|
||||
class MockWindow:
|
||||
@ -56,7 +57,7 @@ class TestEditReference(unittest.TestCase):
|
||||
@unittest.skipUnless(MOCKING, "Requires unittest.mock to run")
|
||||
def test_editreference(self):
|
||||
dbstate = DbState()
|
||||
db = make_database("bsddb")
|
||||
db = make_database("sqlite")
|
||||
path = "/tmp/edit_ref_test"
|
||||
try:
|
||||
os.mkdir(path)
|
||||
@ -66,7 +67,8 @@ class TestEditReference(unittest.TestCase):
|
||||
dbstate.change_database(db)
|
||||
source = Place()
|
||||
source.gramps_id = "P0001"
|
||||
dbstate.db.place_map[source.handle] = source.serialize()
|
||||
with DbTxn("test place", dbstate.db) as trans:
|
||||
dbstate.db.add_place(source, trans)
|
||||
editor = MockEditReference(dbstate, uistate=None, track=[],
|
||||
source=source, source_ref=None, update=None)
|
||||
with patch('gramps.gui.editors.editreference.ErrorDialog') as MockED:
|
||||
|
@ -1,75 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2000-2007 Donald N. Allingham
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
"""
|
||||
Gramps Database API.
|
||||
|
||||
Database Architecture
|
||||
=====================
|
||||
|
||||
Access to the database is made through Python classes. Exactly
|
||||
what functionality you have is dependent on the properties of the
|
||||
database. For example, if you are accessing a read-only view, then
|
||||
you will only have access to a subset of the methods available.
|
||||
|
||||
At the root of any database interface is either :py:class:`.DbReadBase` and/or
|
||||
:py:class:`.DbWriteBase`. These define the methods to read and write to a
|
||||
database, respectively.
|
||||
|
||||
The full database hierarchy is:
|
||||
|
||||
- :py:class:`.DbBsddb` - read and write implementation to BSDDB databases
|
||||
|
||||
* :py:class:`.DbWriteBase` - virtual and implementation-independent methods
|
||||
for reading data
|
||||
|
||||
* :py:class:`.DbBsddbRead` - read-only (accessors, getters) implementation
|
||||
to BSDDB databases
|
||||
|
||||
+ :py:class:`.DbReadBase` - virtual and implementation-independent
|
||||
methods for reading data
|
||||
|
||||
+ :py:class:`.Callback` - callback and signal functions
|
||||
|
||||
* :py:class:`.UpdateCallback` - callback functionality
|
||||
|
||||
DbBsddb
|
||||
=======
|
||||
|
||||
The :py:class:`.DbBsddb` interface defines a hierarchical database
|
||||
(non-relational) written in
|
||||
`PyBSDDB <http://www.jcea.es/programacion/pybsddb.htm>`_. There is no
|
||||
such thing as a database schema, and the meaning of the data is
|
||||
defined in the Python classes above. The data is stored as pickled
|
||||
tuples and unserialized into the primary data types (below).
|
||||
|
||||
More details can be found in the manual's
|
||||
`Using database API <http://www.gramps-project.org/wiki/index.php?title=Using_database_API>`_.
|
||||
"""
|
||||
|
||||
from gramps.gen.db.base import *
|
||||
from gramps.gen.db.dbconst import *
|
||||
from .cursor import *
|
||||
from .read import *
|
||||
from .bsddbtxn import *
|
||||
from gramps.gen.db.txn import *
|
||||
from .undoredo import *
|
||||
from gramps.gen.db.exceptions import *
|
||||
from .write import *
|
@ -26,7 +26,7 @@ plg.id = 'bsddb'
|
||||
plg.name = _("BSDDB")
|
||||
plg.name_accell = _("_BSDDB Database")
|
||||
plg.description = _("Berkeley Software Distribution Database Backend")
|
||||
plg.version = '1.0'
|
||||
plg.version = '2.0'
|
||||
plg.gramps_target_version = "5.1"
|
||||
plg.status = STABLE
|
||||
plg.fname = 'bsddb.py'
|
||||
|
@ -1,7 +1,8 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2015-2016 Douglas S. Blank <doug.blank@gmail.com>
|
||||
# Copyright (C) 2020 Paul Culley <paulr2787@gmail.com>
|
||||
# Copyright (C) 2020 Nick Hall
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@ -17,7 +18,246 @@
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
""" BSDDB upgrade module """
|
||||
#------------------------------------------------------------------------
|
||||
#
|
||||
# Python Modules
|
||||
#
|
||||
#------------------------------------------------------------------------
|
||||
import os
|
||||
import pickle
|
||||
import logging
|
||||
from bsddb3.db import DB, DB_DUP, DB_HASH, DB_RDONLY
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.plugins.db.dbapi.sqlite import SQLite
|
||||
from gramps.cli.clidbman import NAME_FILE, find_next_db_dir
|
||||
from gramps.gen.db.dbconst import DBBACKEND, DBMODE_W, SCHVERSFN
|
||||
from gramps.gen.db.exceptions import (DbException, DbSupportedError,
|
||||
DbUpgradeRequiredError, DbVersionError)
|
||||
from gramps.gen.db.utils import clear_lock_file
|
||||
from gramps.gen.lib import Researcher
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
from gramps.gen.updatecallback import UpdateCallback
|
||||
_ = glocale.translation.gettext
|
||||
|
||||
LOG = logging.getLogger(".upgrade")
|
||||
_MINVERSION = 9
|
||||
_DBVERSION = 19
|
||||
|
||||
|
||||
from gramps.plugins.db.bsddb import DbBsddb
|
||||
class DbBsddb(SQLite):
    """
    Gramps BSDDB Converter.

    One-shot converter that copies an on-disk BSDDB family tree into a
    brand-new SQLite database, then deliberately raises
    DbUpgradeRequiredError so the normal schema-upgrade machinery can
    finish the job.
    """
    def __init__(self):
        """Create a new GrampsDB."""

        super().__init__()

    def load(self, dirname, callback=None, mode=DBMODE_W,
             force_schema_upgrade=False,
             update=True,
             username=None, password=None):
        """
        Here we create a sqlite db, and copy the bsddb into it.
        The new db is initially in a new directory, when we finish the copy
        we replace the contents of the original directory with the new db.

        We always raise an exception to complete this, as the new db still
        needs to be upgraded some more. When we raise the exception, the new
        db is closed.

        :param dirname: directory of the existing BSDDB database to convert.
        :param callback: progress callback passed to UpdateCallback.
        :param mode: requested open mode (unused here; conversion always
            writes).
        :param force_schema_upgrade: must be True, i.e. the user has already
            confirmed the upgrade; otherwise DbSupportedError is raised.
        :param update: must be True; a read-only open cannot convert.
        :param username: credentials forwarded to the SQLite backend.
        :param password: credentials forwarded to the SQLite backend.
        :raises DbException: if a read-only (update=False) open is requested.
        :raises DbSupportedError: if the user has not confirmed the upgrade.
        :raises DbVersionError: if the BSDDB schema is older than _MINVERSION.
        :raises DbUpgradeRequiredError: always, on successful conversion.
        """
        if not update:
            raise DbException("Not Available")
        if not force_schema_upgrade:  # make sure user wants to upgrade
            raise DbSupportedError(_("BSDDB"))

        UpdateCallback.__init__(self, callback)

        # Here we open the dbapi db (a new one) for writing
        new_path = find_next_db_dir()
        os.mkdir(new_path)
        # store dbid in new dir
        dbid = 'sqlite'
        backend_path = os.path.join(new_path, DBBACKEND)
        with open(backend_path, "w", encoding='utf8') as backend_file:
            backend_file.write(dbid)

        super().load(new_path, callback=None, mode='w',
                     force_schema_upgrade=False,
                     username=username, password=password)

        # now read in the bsddb and copy to dbapi
        schema_vers = None
        total = 0
        # (bsddb file/table name, sqlite table name) pairs to copy
        tables = (
            ('person', 'person'),
            ('family', 'family'),
            ('event', 'event'),
            ('place', 'place'),
            ('repo', 'repository'),
            ('source', 'source'),
            ('citation', 'citation'),
            ('media', 'media'),
            ('note', 'note'),
            ('tag', 'tag'),
            ('meta_data', 'metadata'))

        # open each dbmap, and get its length for the total
        file_name = os.path.join(dirname, 'name_group.db')
        if os.path.isfile(file_name):
            name_group_dbmap = DB()
            name_group_dbmap.set_flags(DB_DUP)
            name_group_dbmap.open(file_name, 'name_group', DB_HASH, DB_RDONLY)
            total += len(name_group_dbmap)
        else:
            name_group_dbmap = None

        table_list = []
        for old_t, new_t in (tables):

            file_name = os.path.join(dirname, old_t + '.db')
            if not os.path.isfile(file_name):
                # missing table files are simply skipped
                continue
            dbmap = DB()
            dbmap.open(file_name, old_t, DB_HASH, DB_RDONLY)
            total += len(dbmap)
            table_list.append((old_t, new_t, dbmap))

        self.set_total(total)
        # copy data from each dbmap to sqlite table
        for old_t, new_t, dbmap in table_list:
            self._txn_begin()
            if new_t == 'metadata':
                # REPLACE: metadata settings may collide with defaults
                # written when the new db was created above
                sql = ("REPLACE INTO metadata (setting, value) VALUES "
                       "(?, ?)")
            else:
                sql = ("INSERT INTO %s (handle, blob_data) VALUES "
                       "(?, ?)" % new_t)

            for key in dbmap.keys():
                self.update()
                data = pickle.loads(dbmap[key], encoding='utf-8')

                if new_t == 'metadata':
                    # several metadata entries need format fix-ups or
                    # renames before they can go into the new db
                    if key == b'version':
                        # found a schema version in metadata
                        schema_vers = data
                    elif key == b'researcher':
                        if len(data[0]) == 7:  # Pre-3.3 format
                            # Upgrade researcher data to include a locality
                            # field in the address.
                            addr = tuple([data[0][0], ''] + list(data[0][1:]))
                            new_data = (addr, data[1], data[2], data[3])
                        else:
                            new_data = data
                        data = Researcher().unserialize(new_data)
                    elif key == b'name_formats':
                        # upgrade formats if they were saved in the old way
                        for format_ix in range(len(data)):
                            fmat = data[format_ix]
                            if len(fmat) == 3:
                                # append the missing "active" flag
                                fmat = fmat + (True,)
                            data[format_ix] = fmat
                    elif key == b'gender_stats':
                        # data is a dict, containing entries (see GenderStats)
                        self.dbapi.execute("DELETE FROM gender_stats")
                        g_sql = ("INSERT INTO gender_stats "
                                 "(given_name, female, male, unknown) "
                                 "VALUES (?, ?, ?, ?)")
                        for name in data:
                            female, male, unknown = data[name]
                            self.dbapi.execute(g_sql,
                                               [name, female, male, unknown])
                        continue  # don't need this in metadata anymore
                    elif key == b'default':
                        # convert to string and change key
                        if isinstance(data, bytes):
                            data = data.decode('utf-8')
                        key = b'default-person-handle'
                    elif key == b'mediapath':
                        # change key
                        key = b'media-path'
                    elif key in [b'surname_list',   # created by db now
                                 b'pevent_names',   # obsolete
                                 b'fevent_names']:  # obsolete
                        continue
                    elif (b'_names' in key or b'refs' in key or
                          b'_roles' in key or b'rels' in key or
                          b'_types' in key):
                        # These are list, but need to be set
                        data = set(data)

                self.dbapi.execute(sql,
                                   [key.decode('utf-8'),
                                    pickle.dumps(data)])

            # get schema version from file if not in metadata
            if new_t == 'metadata' and schema_vers is None:
                versionpath = os.path.join(dirname, str(SCHVERSFN))
                if os.path.isfile(versionpath):
                    with open(versionpath, "r") as version_file:
                        schema_vers = int(version_file.read().strip())
                else:
                    schema_vers = 0
                # and put schema version into metadata
                self.dbapi.execute(sql, ["version", schema_vers])
            self._txn_commit()
            dbmap.close()
            if new_t == 'metadata' and schema_vers < _MINVERSION:
                # too old to upgrade at all; NOTE: the new sqlite files
                # created above are left in place when this raises
                raise DbVersionError(schema_vers, _MINVERSION, _DBVERSION)

        if name_group_dbmap:
            self._txn_begin()
            for key in name_group_dbmap.keys():
                self.update()
                # name_group data (grouping) is NOT pickled
                data = name_group_dbmap[key]
                name = key.decode('utf-8')
                grouping = data.decode('utf-8')
                self.dbapi.execute(
                    "INSERT INTO name_group (name, grouping) VALUES (?, ?)",
                    [name, grouping])
            self._txn_commit()
            name_group_dbmap.close()

        # done with new sqlite db, close it. Cannot use normal close as it
        # overwrites the metadata.
        self._close()
        try:
            clear_lock_file(self.get_save_path())
        except IOError:
            # best-effort: a missing/unremovable lock file is not fatal
            pass
        self.db_is_open = False
        self._directory = None

        # copy tree name to new dir
        old_db_name = os.path.join(dirname, NAME_FILE)
        db_name = os.path.join(new_path, NAME_FILE)
        with open(old_db_name, "r", encoding='utf8') as _file:
            name = _file.read().strip()
        with open(db_name, "w", encoding='utf8') as _file:
            _file.write(name)
        # remove files from old dir
        for filename in os.listdir(dirname):
            file_path = os.path.join(dirname, filename)
            try:
                os.unlink(file_path)
            except Exception as e:
                LOG.error('Failed to delete %s. Reason: %s' % (file_path, e))
        # copy new db files to old dir
        for filename in os.listdir(new_path):
            old_file_path = os.path.join(new_path, filename)
            file_path = os.path.join(dirname, filename)
            try:
                os.replace(old_file_path, file_path)
            except Exception as e:
                LOG.error('Failed to move %s. Reason: %s' % (old_file_path, e))
        os.rmdir(new_path)

        # done preparing new db, but we still need to finish schema upgrades
        # NOTE(review): 'xx' looks like a placeholder for the expected
        # version argument of DbUpgradeRequiredError -- confirm intent
        raise DbUpgradeRequiredError(schema_vers, 'xx')
|
||||
|
@ -1,239 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2009 Gerald W. Britton
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
"""
|
||||
BSDDBTxn class: Wrapper for BSDDB transaction-oriented methods
|
||||
"""
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Standard python modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
import logging
|
||||
import inspect
|
||||
import os
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.db.dbconst import DBLOGNAME
|
||||
_LOG = logging.getLogger(DBLOGNAME)
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# BSDDBTxn
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
|
||||
class BSDDBTxn:
    """
    Wrapper for BSDDB methods that set up and manage transactions. Implements
    context management functionality allowing constructs like:

    with BSDDBTxn(env) as txn:
        DB.get(txn=txn)
        DB.put(txn=txn)
        DB.delete(txn=txn)

    and other transaction-oriented DB access methods, where "env" is a
    BSDDB DBEnv object and "DB" is a BSDDB database object.

    A transaction begins when the "with" statement is executed and is
    committed when control leaves the "with" block normally; if the block
    raises, the exception is propagated without committing.
    """

    __slots__ = ['env', 'db', 'txn', 'parent']

    def __init__(self, env, db=None):
        """
        Initialize transaction instance
        """
        # Conditional on __debug__ because all that frame stuff may be slow
        if __debug__:
            frame = inspect.stack()[1]
            _LOG.debug(" BSDDBTxn %s instantiated. Called from file %s,"
                       " line %s, in %s" %
                       (hex(id(self)), os.path.split(frame[1])[1],
                        frame[2], frame[3]))
        self.env = env
        self.db = db
        self.txn = None

    # Context manager methods

    def __enter__(self, parent=None, **kwargs):
        """
        Context manager entry method

        Begin the transaction
        """
        _LOG.debug(" BSDDBTxn %s entered" % hex(id(self)))
        self.parent = parent
        self.txn = self.begin(parent, **kwargs)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Context manager exit function

        Commit the transaction if no exception occurred
        """
        _LOG.debug(" BSDDBTxn %s exited" % hex(id(self)))
        if exc_type is not None:
            # let the exception propagate without committing
            return False
        if self.txn:
            self.commit()
        return True

    # Methods implementing txn_ methods in DBEnv

    def begin(self, *args, **kwargs):
        """
        Create and begin a new transaction. A DBTxn object is returned
        """
        _LOG.debug(" BSDDBTxn %s begin" % hex(id(self)))
        _LOG.debug(" BSDDBTxn %s calls %s %s txn_begin" %
                   (hex(id(self)), self.env.__class__.__name__,
                    hex(id(self.env))))
        self.txn = self.env.txn_begin(*args, **kwargs)
        return self.txn

    def checkpoint(self, *args, **kwargs):
        """
        Flush the underlying memory pool, write a checkpoint record to the
        log and then flush the log
        """
        if not self.env:
            return
        self.env.txn_checkpoint(*args, **kwargs)

    def stat(self):
        """
        Return a dictionary of transaction statistics
        """
        return self.env.txn_stat() if self.env else None

    def recover(self):
        """
        Returns a list of tuples (GID, TXN) of transactions prepared but
        still unresolved
        """
        return self.env.txn_recover() if self.env else None

    # Methods implementing DBTxn methods

    def abort(self):
        """
        Abort the transaction
        """
        if not self.txn:
            return
        self.txn.abort()
        self.txn = None

    def commit(self, flags=0):
        """
        End the transaction, committing any changes to the databases
        """
        _LOG.debug(" BSDDBTxn %s commit" % hex(id(self)))
        if not self.txn:
            return
        self.txn.commit(flags)
        self.txn = None

    def id(self):
        """
        Return the unique transaction id associated with the specified
        transaction
        """
        return self.txn.id() if self.txn else None

    def prepare(self, gid):
        """
        Initiate the beginning of a two-phase commit
        """
        if self.txn:
            self.txn.prepare(gid)

    def discard(self):
        """
        Release all the per-process resources associated with the specified
        transaction, neither committing nor aborting the transaction
        """
        if not self.txn:
            return
        self.txn.discard()
        self.txn = None

    # Methods implementing DB methods within the transaction context

    def get(self, key, default=None, txn=None, **kwargs):
        """
        Returns the data object associated with key
        """
        active_txn = txn or self.txn
        return self.db.get(key, default, active_txn, **kwargs)

    def pget(self, key, default=None, txn=None, **kwargs):
        """
        Returns the primary key, given the secondary one, and associated data
        """
        active_txn = txn or self.txn
        return self.db.pget(key, default, active_txn, **kwargs)

    def put(self, key, data, txn=None, **kwargs):
        """
        Stores the key/data pair in the database
        """
        active_txn = txn or self.txn
        return self.db.put(key, data, active_txn, **kwargs)

    def delete(self, key, txn=None, **kwargs):
        """
        Removes a key/data pair from the database
        """
        # per stdlib convention for in-place mutators, no value is returned
        self.db.delete(key, txn or self.txn, **kwargs)
|
||||
|
||||
# test code
# Ad-hoc smoke test: opens a throwaway BSDDB environment in /tmp and
# exercises BSDDBTxn as a context manager. Side effects only (prints and
# files under /tmp); not part of the automated test suite.
if __name__ == "__main__":
    print("1")
    from bsddb3 import db, dbshelve
    print("2")
    x = db.DBEnv()
    print("3")
    # private, transactional environment backed by /tmp
    x.open('/tmp', db.DB_CREATE | db.DB_PRIVATE |\
           db.DB_INIT_MPOOL |\
           db.DB_INIT_LOG | db.DB_INIT_TXN)
    print("4")
    d = dbshelve.DBShelf(x)
    print("5")
    #from tran import BSDDBTxn as T
    print("6")
    T = BSDDBTxn
    # enter a transaction, dump its stats/id, then checkpoint the env;
    # the transaction commits automatically when the block exits
    with T(x) as tx:
        print("stat", tx.stat())
        print("id", tx.id())
        tx.checkpoint()
|
@ -1,134 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2000-2007 Donald N. Allingham
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# gen/db/cursor.py
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Standard python modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from pickle import dumps, loads
|
||||
|
||||
try:
|
||||
from bsddb3 import db
|
||||
except:
|
||||
# FIXME: make this more abstract to deal with other backends
|
||||
class db:
|
||||
DB_RMW = 0
|
||||
DB_FIRST = 0
|
||||
DB_LAST = 0
|
||||
DB_CURRENT = 0
|
||||
DB_PREV = 0
|
||||
DB_NEXT = 0
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# BsddbBaseCursor class
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
|
||||
class BsddbBaseCursor:
    """
    Provide a basic iterator that allows the user to cycle through
    the elements in a particular map.

    A cursor should never be directly instantiated. Instead, in should be
    created by the database class.

    A cursor should only be used for a single pass through the
    database. If multiple passes are needed, multiple cursors
    should be used.
    """

    def __init__(self, txn=None, update=False, commit=False):
        """
        Instantiate the object. Note, this method should be overridden in
        derived classes that properly set self.cursor and self.source

        :param txn: optional transaction the cursor operates under; if
            given together with commit=True it is committed on exit.
        :param update: when True, reads pass DB_RMW (read-modify-write
            locking) to the underlying cursor.
        :param commit: when True, commit ``txn`` when the context exits.
        """
        self.cursor = self.source = None
        self.txn = txn
        self._update = update
        self.commit = commit

    def __getattr__(self, name):
        """
        Return a method from the underlying cursor object, if it exists
        """
        # only called for names not found on this wrapper, so explicit
        # attributes above are never shadowed
        return getattr(self.cursor, name)

    def __enter__(self):
        """
        Context manager enter method
        """
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Context manager exit method
        """
        # close() resolves to the underlying cursor via __getattr__
        self.close()
        if self.txn and self.commit:
            self.txn.commit()
        # suppress nothing: True only when no exception occurred
        return exc_type is None

    def __iter__(self):
        """
        Iterator
        """

        data = self.first()
        _n = self.next  # Saved attribute lookup in the loop
        while data:
            yield data
            data = _n()

    def _get(_flags=0):
        """ Closure that returns a cursor get function """
        # NOTE: called at class-definition time (no self); the returned
        # function becomes a regular method with _flags baked in.

        def get(self, flags=0, **kwargs):
            """
            Issue DBCursor get call (with DB_RMW flag if update requested)
            Return results to caller
            """
            data = self.cursor.get(
                _flags | flags | (db.DB_RMW if self._update else 0),
                **kwargs)

            # decode the key and unpickle the payload; None means no entry
            return (data[0].decode('utf-8'), loads(data[1])) if data else None

        return get

    # Use closure to define access methods

    current = _get(db.DB_CURRENT)
    first = _get(db.DB_FIRST)
    ##python2 iterator
    next = _get(db.DB_NEXT)
    ##python3 iterator
    __next__ = _get(db.DB_NEXT)
    last = _get(db.DB_LAST)
    prev = _get(db.DB_PREV)

    def update(self, key, data, flags=0, **kwargs):
        """
        Write the current key, data pair to the database.
        """
        # DB_CURRENT overwrites the record at the cursor position
        self.cursor.put(key, dumps(data), flags=flags | db.DB_CURRENT,
                        **kwargs)
|
File diff suppressed because it is too large
Load Diff
@ -1,85 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2016 Douglas S. Blank <doug.blank@gmail.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
## Removed from clidbman.py
|
||||
## specific to bsddb
|
||||
|
||||
import os
|
||||
from bsddb3 import dbshelve, db
|
||||
|
||||
from gramps.gen.db import META, PERSON_TBL
|
||||
from gramps.gen.db.dbconst import BDBVERSFN
|
||||
|
||||
import logging
|
||||
LOG = logging.getLogger(".dbsummary")
|
||||
|
||||
def get_dbdir_summary(dirpath, name):
    """
    Returns (people_count, bsddb_version, schema_version) of
    current DB.
    Returns ("Unknown", "Unknown", "Unknown") if invalid DB or other error.

    :param dirpath: directory holding the BSDDB database files.
    :param name: family-tree name, used only for log messages.
    """

    # the stored bsddb version must match the installed library exactly,
    # otherwise opening the environment is unsafe
    bdbversion_file = os.path.join(dirpath, BDBVERSFN)
    if os.path.isfile(bdbversion_file):
        with open(bdbversion_file) as vers_file:
            bsddb_version = vers_file.readline().strip()
    else:
        return "Unknown", "Unknown", "Unknown"

    current_bsddb_version = str(db.version())
    if bsddb_version != current_bsddb_version:
        return "Unknown", bsddb_version, "Unknown"

    env = db.DBEnv()
    flags = db.DB_CREATE | db.DB_PRIVATE |\
            db.DB_INIT_MPOOL |\
            db.DB_INIT_LOG | db.DB_INIT_TXN
    try:
        env.open(dirpath, flags)
    except Exception as msg:
        LOG.warning("Error opening db environment for '%s': %s" %
                    (name, str(msg)))
        try:
            env.close()
        except Exception as msg:
            LOG.warning("Error closing db environment for '%s': %s" %
                        (name, str(msg)))
        return "Unknown", bsddb_version, "Unknown"
    dbmap1 = dbshelve.DBShelf(env)
    fname = os.path.join(dirpath, META + ".db")
    try:
        dbmap1.open(fname, META, db.DB_HASH, db.DB_RDONLY)
    # was a bare "except:" -- narrowed so SystemExit/KeyboardInterrupt
    # are not swallowed while still treating any open failure as
    # "unknown schema"
    except Exception:
        env.close()
        return "Unknown", bsddb_version, "Unknown"
    schema_version = dbmap1.get(b'version', default=None)
    dbmap1.close()
    dbmap2 = dbshelve.DBShelf(env)
    fname = os.path.join(dirpath, PERSON_TBL + ".db")
    try:
        dbmap2.open(fname, PERSON_TBL, db.DB_HASH, db.DB_RDONLY)
    # same narrowing as above; the schema version found so far is still
    # reported even when the person table cannot be opened
    except Exception:
        env.close()
        return "Unknown", bsddb_version, schema_version
    count = len(dbmap2)
    dbmap2.close()
    env.close()
    return (count, bsddb_version, schema_version)
|
@ -1,228 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2000-2007 Donald N. Allingham
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
import os
|
||||
import tempfile
|
||||
import shutil
|
||||
|
||||
from bsddb3 import dbshelve, db
|
||||
|
||||
from ..read import DbBsddbTreeCursor
|
||||
|
||||
class Data:
    """
    Simple value object holding a person's handle, surname and given name;
    used as the stored payload in the cursor tests.
    """

    def __init__(self, handle, surname, name):
        # keep the three identifying fields exactly as supplied
        self.handle, self.surname, self.name = handle, surname, name
|
||||
|
||||
class CursorTest(unittest.TestCase):
    """Test the cursor handling."""

    def setUp(self):
        # build a throwaway transactional BSDDB environment in a temp dir
        self._tmpdir = tempfile.mkdtemp()
        self.full_name = os.path.join(self._tmpdir,'test.grdb')
        self.env = db.DBEnv()
        self.env.set_cachesize(0,0x2000000)
        self.env.set_lk_max_locks(25000)
        self.env.set_lk_max_objects(25000)

        # clean up unused logs
        # (flag and method names differ across bsddb library versions,
        # so probe for whichever pair this installation provides)
        autoremove_flag = None
        autoremove_method = None
        for flag in ["DB_LOG_AUTO_REMOVE", "DB_LOG_AUTOREMOVE"]:
            if hasattr(db, flag):
                autoremove_flag = getattr(db, flag)
                break
        for method in ["log_set_config", "set_flags"]:
            if hasattr(self.env, method):
                autoremove_method = getattr(self.env, method)
                break
        if autoremove_method and autoremove_flag:
            autoremove_method(autoremove_flag, 1)

        # The DB_PRIVATE flag must go if we ever move to multi-user setup
        env_flags = db.DB_CREATE|db.DB_RECOVER|db.DB_PRIVATE|\
                    db.DB_INIT_MPOOL|db.DB_INIT_LOCK|\
                    db.DB_INIT_LOG|db.DB_INIT_TXN

        env_name = "%s/env" % (self._tmpdir,)
        if not os.path.isdir(env_name):
            os.mkdir(env_name)
        self.env.open(env_name,env_flags)
        (self.person_map,self.surnames) = self._open_tables()
        (self.place_map, self.placerefs) = self._open_treetables()

    def _open_tables(self):
        # primary person shelf plus a secondary index keyed on surname
        dbmap = dbshelve.DBShelf(self.env)
        dbmap.db.set_pagesize(16384)
        dbmap.open(self.full_name, 'person', db.DB_HASH,
                   db.DB_CREATE|db.DB_AUTO_COMMIT, 0o666)
        person_map = dbmap

        table_flags = db.DB_CREATE|db.DB_AUTO_COMMIT

        surnames = db.DB(self.env)
        surnames.set_flags(db.DB_DUP|db.DB_DUPSORT)
        surnames.open(self.full_name, "surnames", db.DB_BTREE,
                      flags=table_flags)

        def find_surname(key,data):
            # secondary-key extractor: bsddb requires bytes keys
            val = data.surname
            if isinstance(val, str):
                val = val.encode('utf-8')
            return val

        person_map.associate(surnames, find_surname, table_flags)

        return (person_map,surnames)

    def _open_treetables(self):
        # primary place shelf plus a secondary index keyed on parent ref
        dbmap = dbshelve.DBShelf(self.env)
        dbmap.db.set_pagesize(16384)
        dbmap.open(self.full_name, 'places', db.DB_HASH,
                   db.DB_CREATE|db.DB_AUTO_COMMIT, 0o666)
        place_map = dbmap

        table_flags = db.DB_CREATE|db.DB_AUTO_COMMIT

        placerefs = db.DB(self.env)
        placerefs.set_flags(db.DB_DUP|db.DB_DUPSORT)
        placerefs.open(self.full_name, "placerefs", db.DB_BTREE,
                       flags=table_flags)

        def find_placeref(key,data):
            # secondary-key extractor: data[2] is the parent reference
            val = data[2]
            if isinstance(val, str):
                val = val.encode('utf-8')
            return val

        place_map.associate(placerefs, find_placeref, table_flags)

        return (place_map, placerefs)

    def tearDown(self):
        # close all tables before the environment, then drop the temp dir
        self.person_map.close()
        self.surnames.close()
        self.place_map.close()
        self.placerefs.close()
        self.env.close()
        shutil.rmtree(self._tmpdir)

    def test_simple_insert(self):
        """test insert and retrieve works."""

        data = Data(b'1' ,'surname1', 'name1')
        the_txn = self.env.txn_begin()
        self.person_map.put(data.handle, data, txn=the_txn)
        the_txn.commit()

        v = self.person_map.get(data.handle)

        self.assertEqual(v.handle, data.handle)

    def test_insert_with_cursor_closed(self):
        """test_insert_with_cursor_closed"""

        # cursor opened and fully closed before the insert happens
        cursor_txn = self.env.txn_begin()

        cursor = self.surnames.cursor(txn=cursor_txn)
        cursor.first()
        cursor.next()
        cursor.close()
        cursor_txn.commit()

        data = Data(b'2', 'surname2', 'name2')
        the_txn = self.env.txn_begin()
        self.person_map.put(data.handle, data, txn=the_txn)
        the_txn.commit()

        v = self.person_map.get(data.handle)

        self.assertEqual(v.handle, data.handle)

    def test_insert_with_cursor_open(self):
        """test_insert_with_cursor_open"""

        # insert inside the same transaction while the cursor is still open
        cursor_txn = self.env.txn_begin()
        cursor = self.surnames.cursor(txn=cursor_txn)
        cursor.first()
        cursor.next()

        data = Data(b'2', 'surname2', 'name2')
        self.person_map.put(data.handle, data, txn=cursor_txn)

        cursor.close()
        cursor_txn.commit()

        v = self.person_map.get(data.handle)

        self.assertEqual(v.handle, data.handle)

    def test_insert_with_cursor_open_and_db_open(self):
        """test_insert_with_cursor_open_and_db_open"""

        # same as above, but with a second handle on the same tables open
        (person2,surnames2) = self._open_tables()

        cursor_txn = self.env.txn_begin()
        cursor = surnames2.cursor(txn=cursor_txn)
        cursor.first()
        cursor.next()

        data = Data(b'2', 'surname2', 'name2')
        self.person_map.put(data.handle, data, txn=cursor_txn)

        cursor.close()
        cursor_txn.commit()

        v = self.person_map.get(data.handle)

        self.assertEqual(v.handle, data.handle)

    def test_treecursor(self):
        #fill with data
        # tuples are (handle, place name, parent handle or '' for roots)
        the_txn = self.env.txn_begin()
        data = [('1', 'countryA', '' ),
                ('2', 'localityA', '1' ),
                ('3', 'localityB', '1' ),
                ('4', 'countryB', '' ),
                ('5', 'streetA', '2' ),
                ('6', 'countryB', '' )]
        for d in data:
            self.place_map.put(d[0].encode('utf-8'), d, txn=the_txn)
        the_txn.commit()

        # the tree cursor should visit every place exactly once
        cursor_txn = self.env.txn_begin()
        cursor = DbBsddbTreeCursor(self.placerefs, self.place_map, False,
                                   cursor_txn)
        placenames = set([d[1] for handle, d in cursor])

        cursor.close()
        cursor_txn.commit()
        pldata = set([d[1] for d in data])
        self.assertEqual(placenames, pldata)
|
||||
|
||||
|
||||
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
@ -1,257 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2000-2007 Donald N. Allingham
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
|
||||
from .. import DbReadBase, DbWriteBase, DbBsddbRead, DbBsddb
|
||||
from gramps.gen.proxy.proxybase import ProxyDbBase
|
||||
from gramps.gen.proxy import LivingProxyDb
|
||||
|
||||
class DbTest(unittest.TestCase):
|
||||
READ_METHODS = [
|
||||
"close",
|
||||
"db_has_bm_changes",
|
||||
"find_backlink_handles",
|
||||
"find_initial_person",
|
||||
"find_next_event_gramps_id",
|
||||
"find_next_family_gramps_id",
|
||||
"find_next_note_gramps_id",
|
||||
"find_next_media_gramps_id",
|
||||
"find_next_person_gramps_id",
|
||||
"find_next_place_gramps_id",
|
||||
"find_next_repository_gramps_id",
|
||||
"find_next_source_gramps_id",
|
||||
"get_bookmarks",
|
||||
"get_child_reference_types",
|
||||
"get_dbid",
|
||||
"get_dbname",
|
||||
"get_default_handle",
|
||||
"get_default_person",
|
||||
"get_event_attribute_types",
|
||||
"get_event_bookmarks",
|
||||
"get_event_cursor",
|
||||
"get_event_from_gramps_id",
|
||||
"get_event_from_handle",
|
||||
"get_event_handles",
|
||||
"get_event_roles",
|
||||
"get_event_types",
|
||||
"get_family_attribute_types",
|
||||
"get_family_bookmarks",
|
||||
"get_family_cursor",
|
||||
"get_family_from_gramps_id",
|
||||
"get_family_from_handle",
|
||||
"get_family_handles",
|
||||
"get_family_relation_types",
|
||||
"get_media_attribute_types",
|
||||
"get_media_bookmarks",
|
||||
"get_media_cursor",
|
||||
"get_media_handles",
|
||||
"get_mediapath",
|
||||
"get_name_group_keys",
|
||||
"get_name_group_mapping",
|
||||
"get_name_types",
|
||||
"get_note_bookmarks",
|
||||
"get_note_cursor",
|
||||
"get_note_from_gramps_id",
|
||||
"get_note_from_handle",
|
||||
"get_note_handles",
|
||||
"get_note_types",
|
||||
"get_number_of_events",
|
||||
"get_number_of_families",
|
||||
"get_number_of_media",
|
||||
"get_number_of_notes",
|
||||
"get_number_of_people",
|
||||
"get_number_of_places",
|
||||
"get_number_of_repositories",
|
||||
"get_number_of_sources",
|
||||
"get_number_of_citations",
|
||||
"get_number_of_tags",
|
||||
"get_media_from_gramps_id",
|
||||
"get_media_from_handle",
|
||||
"get_person_attribute_types",
|
||||
"get_person_cursor",
|
||||
"get_person_from_gramps_id",
|
||||
"get_person_from_handle",
|
||||
"get_person_handles",
|
||||
"get_place_bookmarks",
|
||||
"get_place_cursor",
|
||||
"get_place_from_gramps_id",
|
||||
"get_place_from_handle",
|
||||
"get_place_handles",
|
||||
"get_raw_event_data",
|
||||
"get_raw_family_data",
|
||||
"get_raw_note_data",
|
||||
"get_raw_media_data",
|
||||
"get_raw_person_data",
|
||||
"get_raw_place_data",
|
||||
"get_raw_repository_data",
|
||||
"get_raw_source_data",
|
||||
"get_raw_tag_data",
|
||||
"get_repo_bookmarks",
|
||||
"get_repository_cursor",
|
||||
"get_repository_from_gramps_id",
|
||||
"get_repository_from_handle",
|
||||
"get_repository_handles",
|
||||
"get_repository_types",
|
||||
"get_researcher",
|
||||
"get_save_path",
|
||||
"get_source_bookmarks",
|
||||
"get_source_cursor",
|
||||
"get_source_from_gramps_id",
|
||||
"get_source_from_handle",
|
||||
"get_source_handles",
|
||||
"get_source_media_types",
|
||||
"get_tag_cursor",
|
||||
"get_tag_from_name",
|
||||
"get_tag_from_handle",
|
||||
"get_tag_handles",
|
||||
"get_surname_list",
|
||||
"get_url_types",
|
||||
"has_event_handle",
|
||||
"has_family_handle",
|
||||
"has_name_group_key",
|
||||
"has_note_handle",
|
||||
"has_media_handle",
|
||||
"has_person_handle",
|
||||
"has_place_handle",
|
||||
"has_repository_handle",
|
||||
"has_source_handle",
|
||||
"has_tag_handle",
|
||||
"is_open",
|
||||
"iter_event_handles",
|
||||
"iter_events",
|
||||
"iter_families",
|
||||
"iter_family_handles",
|
||||
"iter_media_handles",
|
||||
"iter_media",
|
||||
"iter_note_handles",
|
||||
"iter_notes",
|
||||
"iter_people",
|
||||
"iter_person_handles",
|
||||
"iter_place_handles",
|
||||
"iter_places",
|
||||
"iter_repositories",
|
||||
"iter_repository_handles",
|
||||
"iter_source_handles",
|
||||
"iter_sources",
|
||||
"iter_tag_handles",
|
||||
"iter_tags",
|
||||
"load",
|
||||
"report_bm_change",
|
||||
"request_rebuild",
|
||||
# Prefix:
|
||||
"set_event_id_prefix",
|
||||
"set_family_id_prefix",
|
||||
"set_note_id_prefix",
|
||||
"set_media_id_prefix",
|
||||
"set_person_id_prefix",
|
||||
"set_place_id_prefix",
|
||||
"set_prefixes",
|
||||
"set_repository_id_prefix",
|
||||
"set_source_id_prefix",
|
||||
# Other set methods:
|
||||
"set_mediapath",
|
||||
"set_researcher",
|
||||
"version_supported",
|
||||
]
|
||||
|
||||
WRITE_METHODS = [
|
||||
"add_event",
|
||||
"add_family",
|
||||
"add_note",
|
||||
"add_media",
|
||||
"add_person",
|
||||
"add_place",
|
||||
"add_repository",
|
||||
"add_source",
|
||||
"add_tag",
|
||||
"add_to_surname_list",
|
||||
"commit_event",
|
||||
"commit_family",
|
||||
"commit_media",
|
||||
"commit_note",
|
||||
"commit_person",
|
||||
"commit_place",
|
||||
"commit_repository",
|
||||
"commit_source",
|
||||
"commit_tag",
|
||||
"rebuild_secondary",
|
||||
"reindex_reference_map",
|
||||
"remove_event",
|
||||
"remove_family",
|
||||
"remove_from_surname_list",
|
||||
"remove_note",
|
||||
"remove_media",
|
||||
"remove_person",
|
||||
"remove_place",
|
||||
"remove_repository",
|
||||
"remove_source",
|
||||
"remove_tag",
|
||||
"set_default_person_handle",
|
||||
"set_name_group_mapping",
|
||||
"transaction_begin",
|
||||
"transaction_commit",
|
||||
]
|
||||
|
||||
def _verify_readonly(self, db):
|
||||
for method in self.READ_METHODS:
|
||||
self.assertTrue(hasattr(db, method),
|
||||
("readonly should have a '%s' method" % method))
|
||||
for method in self.WRITE_METHODS:
|
||||
self.assertFalse(hasattr(db, method),
|
||||
("readonly should NOT have a '%s' method" % method))
|
||||
|
||||
def _verify_readwrite(self, db):
|
||||
for method in self.READ_METHODS:
|
||||
self.assertTrue(hasattr(db, method),
|
||||
("readwrite should have a '%s' method" % method))
|
||||
for method in self.WRITE_METHODS:
|
||||
self.assertTrue(hasattr(db, method),
|
||||
("readwrite should have a '%s' method" % method))
|
||||
|
||||
def test_verify_readbase(self):
|
||||
db = DbReadBase()
|
||||
self._verify_readonly(db)
|
||||
|
||||
def test_verify_writebase(self):
|
||||
db = DbWriteBase()
|
||||
self._verify_readwrite(db)
|
||||
|
||||
def test_verify_read(self):
|
||||
db = DbBsddbRead()
|
||||
self._verify_readonly(db)
|
||||
|
||||
def test_verify_write(self):
|
||||
db = DbBsddb()
|
||||
self._verify_readwrite(db)
|
||||
|
||||
def test_verify_proxy(self):
|
||||
gdb = DbBsddb()
|
||||
db = ProxyDbBase(gdb)
|
||||
self._verify_readonly(db)
|
||||
|
||||
def test_verify_living(self):
|
||||
gdb = DbBsddb()
|
||||
db = LivingProxyDb(gdb, LivingProxyDb.MODE_EXCLUDE_ALL)
|
||||
self._verify_readonly(db)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
@ -1,164 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2000-2007 Donald N. Allingham
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
|
||||
from .. import DbBsddb, DbTxn
|
||||
from gramps.cli.clidbman import CLIDbManager
|
||||
from gramps.gen.dbstate import DbState
|
||||
from gramps.gen.db.utils import make_database
|
||||
from gramps.gen.lib import (Source, RepoRef, Citation, Repository, Person,
|
||||
Family, Event, Place, Media)
|
||||
|
||||
class GrampsDbBaseTest(unittest.TestCase):
|
||||
"""Base class for unittest that need to be able to create
|
||||
test databases."""
|
||||
|
||||
def setUp(self):
|
||||
def dummy_callback(dummy):
|
||||
pass
|
||||
|
||||
self.dbstate = DbState()
|
||||
self.dbman = CLIDbManager(self.dbstate)
|
||||
dirpath, name = self.dbman.create_new_db_cli("Test: bsddb", dbid="bsddb")
|
||||
self._db = make_database("bsddb")
|
||||
self._db.load(dirpath, None)
|
||||
|
||||
def tearDown(self):
|
||||
self._db.close()
|
||||
self.dbman.remove_database("Test: bsddb")
|
||||
|
||||
def _populate_database(self,
|
||||
num_sources = 1,
|
||||
num_persons = 0,
|
||||
num_families = 0,
|
||||
num_events = 0,
|
||||
num_places = 0,
|
||||
num_media = 0,
|
||||
num_links = 1):
|
||||
# start with sources
|
||||
sources = []
|
||||
for i in range(num_sources):
|
||||
sources.append(self._add_source())
|
||||
|
||||
# now for each of the other tables. Give each entry a link
|
||||
# to num_link sources, sources are chosen on a round robin
|
||||
# basis
|
||||
|
||||
for num, add_func in ((num_persons, self._add_person_with_sources),
|
||||
(num_families, self._add_family_with_sources),
|
||||
(num_events, self._add_event_with_sources),
|
||||
(num_places, self._add_place_with_sources),
|
||||
(num_media, self._add_media_with_sources)):
|
||||
|
||||
source_idx = 1
|
||||
for person_idx in range(num):
|
||||
|
||||
# Get the list of sources to link
|
||||
lnk_sources = set()
|
||||
for i in range(num_links):
|
||||
lnk_sources.add(sources[source_idx-1])
|
||||
source_idx = (source_idx+1) % len(sources)
|
||||
|
||||
try:
|
||||
add_func(lnk_sources)
|
||||
except:
|
||||
print ("person_idx = ", person_idx)
|
||||
print ("lnk_sources = ", repr(lnk_sources))
|
||||
raise
|
||||
|
||||
return
|
||||
|
||||
def _add_source(self,repos=None):
|
||||
# Add a Source
|
||||
|
||||
with DbTxn("Add Source and Citation", self._db) as tran:
|
||||
source = Source()
|
||||
if repos is not None:
|
||||
repo_ref = RepoRef()
|
||||
repo_ref.set_reference_handle(repos.get_handle())
|
||||
source.add_repo_reference(repo_ref)
|
||||
self._db.add_source(source, tran)
|
||||
self._db.commit_source(source, tran)
|
||||
citation = Citation()
|
||||
citation.set_reference_handle(source.get_handle())
|
||||
self._db.add_citation(citation, tran)
|
||||
self._db.commit_citation(citation, tran)
|
||||
|
||||
return citation
|
||||
|
||||
def _add_repository(self):
|
||||
# Add a Repository
|
||||
|
||||
with DbTxn("Add Repository", self._db) as tran:
|
||||
repos = Repository()
|
||||
self._db.add_repository(repos, tran)
|
||||
self._db.commit_repository(repos, tran)
|
||||
|
||||
return repos
|
||||
|
||||
|
||||
def _add_object_with_source(self, citations, object_class, add_method,
|
||||
commit_method):
|
||||
|
||||
object = object_class()
|
||||
|
||||
with DbTxn("Add Object", self._db) as tran:
|
||||
for citation in citations:
|
||||
object.add_citation(citation.get_handle())
|
||||
add_method(object, tran)
|
||||
commit_method(object, tran)
|
||||
|
||||
return object
|
||||
|
||||
def _add_person_with_sources(self, citations):
|
||||
|
||||
return self._add_object_with_source(citations,
|
||||
Person,
|
||||
self._db.add_person,
|
||||
self._db.commit_person)
|
||||
|
||||
def _add_family_with_sources(self, citations):
|
||||
|
||||
return self._add_object_with_source(citations,
|
||||
Family,
|
||||
self._db.add_family,
|
||||
self._db.commit_family)
|
||||
|
||||
def _add_event_with_sources(self, citations):
|
||||
|
||||
return self._add_object_with_source(citations,
|
||||
Event,
|
||||
self._db.add_event,
|
||||
self._db.commit_event)
|
||||
|
||||
def _add_place_with_sources(self, citations):
|
||||
|
||||
return self._add_object_with_source(citations,
|
||||
Place,
|
||||
self._db.add_place,
|
||||
self._db.commit_place)
|
||||
|
||||
def _add_media_with_sources(self, citations):
|
||||
|
||||
return self._add_object_with_source(citations,
|
||||
Media,
|
||||
self._db.add_media,
|
||||
self._db.commit_media)
|
@ -1,219 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2000-2007 Donald N. Allingham
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
import logging
|
||||
import time
|
||||
|
||||
from .. import DbTxn
|
||||
from gramps.gen.lib import Person, Event, Source, Citation
|
||||
from gramps.gen.errors import HandleError
|
||||
|
||||
logger = logging.getLogger('Gramps.GrampsDbBase_Test')
|
||||
|
||||
from .grampsdbtestbase import GrampsDbBaseTest
|
||||
|
||||
class ReferenceMapTest(GrampsDbBaseTest):
|
||||
"""Test methods on the GrampsDbBase class that are related to the reference_map
|
||||
index implementation."""
|
||||
|
||||
def test_simple_lookup(self):
|
||||
"""insert a record and a reference and check that
|
||||
a lookup for the reference returns the original
|
||||
record."""
|
||||
|
||||
citation = self._add_source()
|
||||
person = self._add_person_with_sources([citation])
|
||||
|
||||
references = list(self._db.find_backlink_handles(citation.get_handle()))
|
||||
|
||||
self.assertEqual(len(references), 1)
|
||||
self.assertEqual(references[0], (Person.__name__, person.get_handle()))
|
||||
|
||||
def test_backlink_for_repository(self):
|
||||
"""check that the citation /source / repos backlink lookup works."""
|
||||
|
||||
repos = self._add_repository()
|
||||
citation = self._add_source(repos=repos)
|
||||
|
||||
references = list(self._db.find_backlink_handles(repos.get_handle()))
|
||||
|
||||
self.assertEqual(len(references), 1)
|
||||
self.assertEqual(references[0][0], Source.__name__)
|
||||
|
||||
references = list(self._db.find_backlink_handles(references[0][1]))
|
||||
|
||||
self.assertEqual(len(references), 1)
|
||||
self.assertEqual(references[0],
|
||||
(Citation.__name__, citation.get_handle()))
|
||||
|
||||
def test_class_limited_lookup(self):
|
||||
"""check that class limited lookups work."""
|
||||
|
||||
citation = self._add_source()
|
||||
person = self._add_person_with_sources([citation])
|
||||
|
||||
self._add_family_with_sources([citation])
|
||||
self._add_event_with_sources([citation])
|
||||
self._add_place_with_sources([citation])
|
||||
self._add_media_with_sources([citation])
|
||||
|
||||
# make sure that we have the correct number of references (one for each object)
|
||||
references = list(self._db.find_backlink_handles(citation.get_handle()))
|
||||
|
||||
self.assertEqual(len(references), 5,
|
||||
"len(references) == %s " % str(len(references)))
|
||||
|
||||
# should just return the person reference
|
||||
references = [ref for ref in self._db.find_backlink_handles(citation.get_handle(), (Person.__name__,))]
|
||||
self.assertEqual(len(references), 1,
|
||||
"len(references) == %s " % str(len(references)))
|
||||
self.assertEqual(references[0][0], Person.__name__,
|
||||
"references = %s" % repr(references))
|
||||
|
||||
# should just return the person and event reference
|
||||
references = list(self._db.find_backlink_handles(citation.get_handle(),
|
||||
(Person.__name__, Event.__name__)))
|
||||
self.assertEqual(len(references), 2,
|
||||
"len(references) == %s " % str(len(references)))
|
||||
self.assertEqual(references[0][0], Person.__name__,
|
||||
"references = %s" % repr(references))
|
||||
self.assertEqual(references[1][0], Event.__name__,
|
||||
"references = %s" % repr(references))
|
||||
|
||||
def test_delete_primary(self):
|
||||
"""check that deleting a primary will remove the backreferences
|
||||
from the reference_map"""
|
||||
|
||||
citation = self._add_source()
|
||||
person = self._add_person_with_sources([citation])
|
||||
|
||||
self.assertIsNotNone(self._db.get_person_from_handle(person.get_handle()))
|
||||
|
||||
with DbTxn("Del Person", self._db) as tran:
|
||||
self._db.remove_person(person.get_handle(),tran)
|
||||
|
||||
self.assertRaises(HandleError, self._db.get_person_from_handle,
|
||||
person.get_handle())
|
||||
|
||||
references = list(self._db.find_backlink_handles(citation.get_handle()))
|
||||
|
||||
self.assertEqual(len(references), 0,
|
||||
"len(references) == %s " % str(len(references)))
|
||||
|
||||
def test_reindex_reference_map(self):
|
||||
"""Test that the reindex function works."""
|
||||
|
||||
def cb(count):
|
||||
pass
|
||||
|
||||
# unhook the reference_map update function so that we
|
||||
# can insert some records without the reference_map being updated.
|
||||
update_method = self._db._update_reference_map
|
||||
self._db._update_reference_map = lambda x,y,z: 1
|
||||
|
||||
# Insert a person/source pair.
|
||||
citation = self._add_source()
|
||||
person = self._add_person_with_sources([citation])
|
||||
|
||||
# Check that the reference map does not contain the reference.
|
||||
references = list(self._db.find_backlink_handles(citation.get_handle()))
|
||||
|
||||
self.assertEqual(len(references), 0,
|
||||
"len(references) == %s " % str(len(references)))
|
||||
|
||||
# Reinstate the reference_map method and reindex the database
|
||||
self._db._update_reference_map = update_method
|
||||
self._db.reindex_reference_map(cb)
|
||||
|
||||
# Check that the reference now appears in the reference_map
|
||||
references = list(self._db.find_backlink_handles(citation.get_handle()))
|
||||
|
||||
self.assertEqual(len(references), 1,
|
||||
"len(references) == %s " % str(len(references)))
|
||||
|
||||
def perf_simple_search_speed(self):
|
||||
"""
|
||||
This doesn't work any more due to multiply inheritance changes.
|
||||
"""
|
||||
|
||||
num_sources = 100
|
||||
num_persons = 1000
|
||||
num_families = 10
|
||||
num_events = 10
|
||||
num_places = 10
|
||||
num_media = 10
|
||||
num_links = 10
|
||||
|
||||
self._populate_database(num_sources,
|
||||
num_persons,
|
||||
num_families,
|
||||
num_events,
|
||||
num_places,
|
||||
num_media,
|
||||
num_links)
|
||||
|
||||
|
||||
# time searching for source backrefs with and without reference_map
|
||||
cur = self._db.get_source_cursor()
|
||||
handle,data = cur.first()
|
||||
cur.close()
|
||||
|
||||
start = time.time()
|
||||
references = list(self._db.find_backlink_handles(handle))
|
||||
end = time.time()
|
||||
|
||||
with_reference_map = end - start
|
||||
|
||||
remember = self._db.__class__.find_backlink_handles
|
||||
|
||||
self._db.__class__.find_backlink_handles = self._db.__class__.__base__.find_backlink_handles
|
||||
|
||||
start = time.time()
|
||||
references = list(self._db.find_backlink_handles(handle))
|
||||
end = time.time()
|
||||
|
||||
without_reference_map = end - start
|
||||
|
||||
self._db.__class__.find_backlink_handles = remember
|
||||
|
||||
logger.info("search test with following data: \n"
|
||||
"num_sources = %d \n"
|
||||
"num_persons = %d \n"
|
||||
"num_families = %d \n"
|
||||
"num_events = %d \n"
|
||||
"num_places = %d \n"
|
||||
"num_media = %d \n"
|
||||
"num_links = %d" % (num_sources,
|
||||
num_persons,
|
||||
num_families,
|
||||
num_events,
|
||||
num_places,
|
||||
num_media,
|
||||
num_links))
|
||||
logger.info("with refs %s\n", str(with_reference_map))
|
||||
logger.info("without refs %s\n", str(without_reference_map))
|
||||
|
||||
self.assertLess(with_reference_map, without_reference_map / 10,
|
||||
"Reference_map should an order of magnitude faster.")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@ -1,548 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2004-2006 Donald N. Allingham
|
||||
# Copyright (C) 2011 Tim G L Lyons
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
"""
|
||||
Exports the DbUndo class for managing Gramps transactions
|
||||
undos and redos.
|
||||
"""
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Standard python modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
import time, os
|
||||
import pickle
|
||||
from collections import deque
|
||||
|
||||
try:
|
||||
from bsddb3 import db
|
||||
except:
|
||||
# FIXME: make this more abstract to deal with other backends
|
||||
class db:
|
||||
DBRunRecoveryError = 0
|
||||
DBAccessError = 0
|
||||
DBPageNotFoundError = 0
|
||||
DBInvalidArgError = 0
|
||||
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
_ = glocale.translation.gettext
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.db.dbconst import (REFERENCE_KEY, KEY_TO_NAME_MAP, TXNDEL,
|
||||
TXNADD, TXNUPD)
|
||||
from . import BSDDBTxn
|
||||
from gramps.gen.errors import DbError
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Local Constants
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
DBERRS = (db.DBRunRecoveryError, db.DBAccessError,
|
||||
db.DBPageNotFoundError, db.DBInvalidArgError)
|
||||
|
||||
_SIGBASE = ('person', 'family', 'source', 'event', 'media',
|
||||
'place', 'repository', 'reference', 'note', 'tag', 'citation')
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# DbUndo class
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
class DbUndo:
|
||||
"""
|
||||
Base class for the Gramps undo/redo manager. Needs to be subclassed
|
||||
for use with a real backend.
|
||||
"""
|
||||
|
||||
__slots__ = ('undodb', 'db', 'mapbase', 'undo_history_timestamp',
|
||||
'txn', 'undoq', 'redoq')
|
||||
|
||||
def __init__(self, grampsdb):
|
||||
"""
|
||||
Class constructor. Set up main instance variables
|
||||
"""
|
||||
self.db = grampsdb
|
||||
self.undoq = deque()
|
||||
self.redoq = deque()
|
||||
self.undo_history_timestamp = time.time()
|
||||
self.txn = None
|
||||
# N.B. the databases have to be in the same order as the numbers in
|
||||
# xxx_KEY in gen/db/dbconst.py
|
||||
self.mapbase = (
|
||||
self.db.person_map,
|
||||
self.db.family_map,
|
||||
self.db.source_map,
|
||||
self.db.event_map,
|
||||
self.db.media_map,
|
||||
self.db.place_map,
|
||||
self.db.repository_map,
|
||||
self.db.reference_map,
|
||||
self.db.note_map,
|
||||
self.db.tag_map,
|
||||
self.db.citation_map,
|
||||
)
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Clear the undo/redo list (but not the backing storage)
|
||||
"""
|
||||
self.undoq.clear()
|
||||
self.redoq.clear()
|
||||
self.undo_history_timestamp = time.time()
|
||||
self.txn = None
|
||||
|
||||
def __enter__(self, value):
|
||||
"""
|
||||
Context manager method to establish the context
|
||||
"""
|
||||
self.open(value)
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"""
|
||||
Context manager method to finish the context
|
||||
"""
|
||||
if exc_type is None:
|
||||
self.close()
|
||||
return exc_type is None
|
||||
|
||||
def open(self, value):
|
||||
"""
|
||||
Open the backing storage. Needs to be overridden in the derived
|
||||
class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the backing storage. Needs to be overridden in the derived
|
||||
class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def append(self, value):
|
||||
"""
|
||||
Add a new entry on the end. Needs to be overridden in the derived
|
||||
class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns an entry by index number. Needs to be overridden in the
|
||||
derived class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
"""
|
||||
Set an entry to a value. Needs to be overridden in the derived class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of entries. Needs to be overridden in the derived
|
||||
class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def commit(self, txn, msg):
|
||||
"""
|
||||
Commit the transaction to the undo/redo database. "txn" should be
|
||||
an instance of Gramps transaction class
|
||||
"""
|
||||
txn.set_description(msg)
|
||||
txn.timestamp = time.time()
|
||||
self.undoq.append(txn)
|
||||
|
||||
def undo(self, update_history=True):
|
||||
"""
|
||||
Undo a previously committed transaction
|
||||
"""
|
||||
if self.db.readonly or self.undo_count == 0:
|
||||
return False
|
||||
return self.__undo(update_history)
|
||||
|
||||
def redo(self, update_history=True):
|
||||
"""
|
||||
Redo a previously committed, then undone, transaction
|
||||
"""
|
||||
if self.db.readonly or self.redo_count == 0:
|
||||
return False
|
||||
return self.__redo(update_history)
|
||||
|
||||
def undoredo(func):
|
||||
"""
|
||||
Decorator function to wrap undo and redo operations within a bsddb
|
||||
transaction. It also catches bsddb errors and raises an exception
|
||||
as appropriate
|
||||
"""
|
||||
def try_(self, *args, **kwargs):
|
||||
try:
|
||||
with BSDDBTxn(self.db.env) as txn:
|
||||
self.txn = self.db.txn = txn.txn
|
||||
status = func(self, *args, **kwargs)
|
||||
if not status:
|
||||
txn.abort()
|
||||
self.db.txn = None
|
||||
return status
|
||||
|
||||
except DBERRS as msg:
|
||||
self.db._log_error()
|
||||
raise DbError(msg)
|
||||
|
||||
return try_
|
||||
|
||||
@undoredo
|
||||
def __undo(self, update_history=True):
|
||||
"""
|
||||
Access the last committed transaction, and revert the data to the
|
||||
state before the transaction was committed.
|
||||
"""
|
||||
txn = self.undoq.pop()
|
||||
self.redoq.append(txn)
|
||||
transaction = txn
|
||||
subitems = transaction.get_recnos(reverse=True)
|
||||
# sigs[obj_type][trans_type]
|
||||
sigs = [[[] for trans_type in range(3)] for key in range(11)]
|
||||
|
||||
# Process all records in the transaction
|
||||
for record_id in subitems:
|
||||
(key, trans_type, handle, old_data, new_data) = \
|
||||
pickle.loads(self.undodb[record_id])
|
||||
|
||||
if key == REFERENCE_KEY:
|
||||
self.undo_reference(old_data, handle, self.mapbase[key])
|
||||
else:
|
||||
self.undo_data(old_data, handle, self.mapbase[key])
|
||||
handle = handle.decode('utf-8')
|
||||
sigs[key][trans_type].append(handle)
|
||||
# now emit the signals
|
||||
self.undo_sigs(sigs, True)
|
||||
|
||||
# Notify listeners
|
||||
if self.db.undo_callback:
|
||||
if self.undo_count > 0:
|
||||
self.db.undo_callback(_("_Undo %s")
|
||||
% self.undoq[-1].get_description())
|
||||
else:
|
||||
self.db.undo_callback(None)
|
||||
|
||||
if self.db.redo_callback:
|
||||
self.db.redo_callback(_("_Redo %s")
|
||||
% transaction.get_description())
|
||||
|
||||
if update_history and self.db.undo_history_callback:
|
||||
self.db.undo_history_callback()
|
||||
return True
|
||||
|
||||
@undoredo
|
||||
def __redo(self, db=None, update_history=True):
|
||||
"""
|
||||
Access the last undone transaction, and revert the data to the state
|
||||
before the transaction was undone.
|
||||
"""
|
||||
txn = self.redoq.pop()
|
||||
self.undoq.append(txn)
|
||||
transaction = txn
|
||||
subitems = transaction.get_recnos()
|
||||
# sigs[obj_type][trans_type]
|
||||
sigs = [[[] for trans_type in range(3)] for key in range(11)]
|
||||
|
||||
# Process all records in the transaction
|
||||
for record_id in subitems:
|
||||
(key, trans_type, handle, old_data, new_data) = \
|
||||
pickle.loads(self.undodb[record_id])
|
||||
|
||||
if key == REFERENCE_KEY:
|
||||
self.undo_reference(new_data, handle, self.mapbase[key])
|
||||
else:
|
||||
self.undo_data(new_data, handle, self.mapbase[key])
|
||||
handle = handle.decode('utf-8')
|
||||
sigs[key][trans_type].append(handle)
|
||||
# now emit the signals
|
||||
self.undo_sigs(sigs, False)
|
||||
|
||||
# Notify listeners
|
||||
if self.db.undo_callback:
|
||||
self.db.undo_callback(_("_Undo %s")
|
||||
% transaction.get_description())
|
||||
|
||||
if self.db.redo_callback:
|
||||
if self.redo_count > 1:
|
||||
new_transaction = self.redoq[-2]
|
||||
self.db.redo_callback(_("_Redo %s")
|
||||
% new_transaction.get_description())
|
||||
else:
|
||||
self.db.redo_callback(None)
|
||||
|
||||
if update_history and self.db.undo_history_callback:
|
||||
self.db.undo_history_callback()
|
||||
return True
|
||||
|
||||
def undo_reference(self, data, handle, db_map):
|
||||
"""
|
||||
Helper method to undo a reference map entry
|
||||
"""
|
||||
try:
|
||||
if data is None:
|
||||
db_map.delete(handle, txn=self.txn)
|
||||
else:
|
||||
db_map.put(handle, data, txn=self.txn)
|
||||
|
||||
except DBERRS as msg:
|
||||
self.db._log_error()
|
||||
raise DbError(msg)
|
||||
|
||||
def undo_data(self, data, handle, db_map):
|
||||
"""
|
||||
Helper method to undo/redo the changes made
|
||||
"""
|
||||
try:
|
||||
if data is None:
|
||||
db_map.delete(handle, txn=self.txn)
|
||||
else:
|
||||
db_map.put(handle, data, txn=self.txn)
|
||||
|
||||
except DBERRS as msg:
|
||||
self.db._log_error()
|
||||
raise DbError(msg)
|
||||
|
||||
def undo_sigs(self, sigs, undo):
|
||||
"""
|
||||
Helper method to undo/redo the signals for changes made
|
||||
We want to do deletes and adds first
|
||||
Note that if 'undo' we swap emits
|
||||
"""
|
||||
for trans_type in [TXNDEL, TXNADD, TXNUPD]:
|
||||
for obj_type in range(11):
|
||||
handles = sigs[obj_type][trans_type]
|
||||
if handles:
|
||||
if not undo and trans_type == TXNDEL \
|
||||
or undo and trans_type == TXNADD:
|
||||
typ = '-delete'
|
||||
else:
|
||||
# don't update a handle if its been deleted, and note
|
||||
# that 'deleted' handles are in the 'add' list if we
|
||||
# are undoing
|
||||
handles = [handle for handle in handles
|
||||
if handle not in
|
||||
sigs[obj_type][TXNADD if undo else TXNDEL]]
|
||||
if ((not undo) and trans_type == TXNADD) \
|
||||
or (undo and trans_type == TXNDEL):
|
||||
typ = '-add'
|
||||
else: # TXNUPD
|
||||
typ = '-update'
|
||||
if handles:
|
||||
self.db.emit(KEY_TO_NAME_MAP[obj_type] + typ,
|
||||
(handles,))
|
||||
|
||||
undo_count = property(lambda self:len(self.undoq))
|
||||
redo_count = property(lambda self:len(self.redoq))
|
||||
|
||||
class DbUndoList(DbUndo):
|
||||
"""
|
||||
Implementation of the Gramps undo database using a Python list
|
||||
"""
|
||||
def __init__(self, grampsdb):
|
||||
"""
|
||||
Class constructor
|
||||
"""
|
||||
super(DbUndoList, self).__init__(grampsdb)
|
||||
self.undodb = []
|
||||
|
||||
def open(self):
|
||||
"""
|
||||
A list does not need to be opened
|
||||
"""
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the list by resetting it to empty
|
||||
"""
|
||||
self.undodb = []
|
||||
self.clear()
|
||||
|
||||
def append(self, value):
|
||||
"""
|
||||
Add an entry on the end of the list
|
||||
"""
|
||||
self.undodb.append(value)
|
||||
return len(self.undodb)-1
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Return an item at the specified index
|
||||
"""
|
||||
return self.undodb[index]
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
"""
|
||||
Set an item at the speficied index to the given value
|
||||
"""
|
||||
self.undodb[index] = value
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Iterator
|
||||
"""
|
||||
for item in self.undodb:
|
||||
yield item
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Return number of entries in the list
|
||||
"""
|
||||
return len(self.undodb)
|
||||
|
||||
class DbUndoBSDDB(DbUndo):
|
||||
"""
|
||||
Class constructor for Gramps undo/redo database using a bsddb recno
|
||||
database as the backing store.
|
||||
"""
|
||||
|
||||
def __init__(self, grampsdb, path):
|
||||
"""
|
||||
Class constructor
|
||||
"""
|
||||
super(DbUndoBSDDB, self).__init__(grampsdb)
|
||||
self.undodb = db.DB()
|
||||
self.path = path
|
||||
|
||||
def open(self):
|
||||
"""
|
||||
Open the undo/redo database
|
||||
"""
|
||||
path = self.path
|
||||
self.undodb.open(path, db.DB_RECNO, db.DB_CREATE)
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the undo/redo database
|
||||
"""
|
||||
self.undodb.close()
|
||||
self.undodb = None
|
||||
self.mapbase = None
|
||||
self.db = None
|
||||
|
||||
try:
|
||||
os.remove(self.path)
|
||||
except OSError:
|
||||
pass
|
||||
self.clear()
|
||||
|
||||
def append(self, value):
|
||||
"""
|
||||
Add an entry on the end of the database
|
||||
"""
|
||||
return self.undodb.append(value)
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of entries in the database
|
||||
"""
|
||||
x = self.undodb.stat()['nkeys']
|
||||
y = len(self.undodb)
|
||||
assert x == y
|
||||
return x
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the entry stored at the specified index
|
||||
"""
|
||||
return self.undodb.get(index)
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
"""
|
||||
Sets the entry stored at the specified index to the value given.
|
||||
"""
|
||||
self.undodb.put(index, value)
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Iterator
|
||||
"""
|
||||
cursor = self.undodb.cursor()
|
||||
data = cursor.first()
|
||||
while data:
|
||||
yield data
|
||||
data = next(cursor)
|
||||
|
||||
def testundo():
|
||||
class T:
|
||||
def __init__(self):
|
||||
self.msg = ''
|
||||
self.timetstamp = 0
|
||||
def set_description(self, msg):
|
||||
self.msg = msg
|
||||
|
||||
class D:
|
||||
def __init__(self):
|
||||
self.person_map = {}
|
||||
self.family_map = {}
|
||||
self.source_map = {}
|
||||
self.event_map = {}
|
||||
self.media_map = {}
|
||||
self.place_map = {}
|
||||
self.note_map = {}
|
||||
self.tag_map = {}
|
||||
self.repository_map = {}
|
||||
self.reference_map = {}
|
||||
|
||||
print("list tests")
|
||||
undo = DbUndoList(D())
|
||||
print(undo.append('foo'))
|
||||
print(undo.append('bar'))
|
||||
print(undo[0])
|
||||
undo[0] = 'foobar'
|
||||
print(undo[0])
|
||||
print("len", len(undo))
|
||||
print("iter")
|
||||
for data in undo:
|
||||
print(data)
|
||||
print()
|
||||
print("bsddb tests")
|
||||
undo = DbUndoBSDDB(D(), '/tmp/testundo')
|
||||
undo.open()
|
||||
print(undo.append('foo'))
|
||||
print(undo.append('fo2'))
|
||||
print(undo.append('fo3'))
|
||||
print(undo[1])
|
||||
undo[1] = 'bar'
|
||||
print(undo[1])
|
||||
for data in undo:
|
||||
print(data)
|
||||
print("len", len(undo))
|
||||
|
||||
print("test commit")
|
||||
undo.commit(T(), msg="test commit")
|
||||
undo.close()
|
||||
|
||||
if __name__ == '__main__':
|
||||
testundo()
|
File diff suppressed because it is too large
Load Diff
@ -41,6 +41,7 @@ from gramps.gen.db.dbconst import (DBLOGNAME, DBBACKEND, KEY_TO_NAME_MAP,
|
||||
TAG_KEY, CITATION_KEY, REPOSITORY_KEY,
|
||||
REFERENCE_KEY)
|
||||
from gramps.gen.db.generic import DbGeneric
|
||||
from gramps.gen.updatecallback import UpdateCallback
|
||||
from gramps.gen.lib import (Tag, Media, Person, Family, Source,
|
||||
Citation, Event, Place, Repository, Note)
|
||||
from gramps.gen.lib.genderstats import GenderStats
|
||||
@ -628,6 +629,29 @@ class DBAPI(DbGeneric):
|
||||
|
||||
return old_data
|
||||
|
||||
def _commit_raw(self, data, obj_key):
|
||||
"""
|
||||
Commit a serialized primary object to the database, storing the
|
||||
changes as part of the transaction.
|
||||
"""
|
||||
table = KEY_TO_NAME_MAP[obj_key]
|
||||
handle = data[0]
|
||||
|
||||
if self._has_handle(obj_key, handle):
|
||||
# update the object:
|
||||
sql = "UPDATE %s SET blob_data = ? WHERE handle = ?" % table
|
||||
self.dbapi.execute(sql,
|
||||
[pickle.dumps(data),
|
||||
handle])
|
||||
else:
|
||||
# Insert the object:
|
||||
sql = ("INSERT INTO %s (handle, blob_data) VALUES (?, ?)") % table
|
||||
self.dbapi.execute(sql,
|
||||
[handle,
|
||||
pickle.dumps(data)])
|
||||
|
||||
return
|
||||
|
||||
def _update_backlinks(self, obj, transaction):
|
||||
|
||||
# Find existing references
|
||||
@ -786,9 +810,14 @@ class DBAPI(DbGeneric):
|
||||
"""
|
||||
Reindex all primary records in the database.
|
||||
"""
|
||||
callback(4)
|
||||
self._txn_begin()
|
||||
self.dbapi.execute("DELETE FROM reference")
|
||||
total = 0
|
||||
for tbl in ('people', 'families', 'events', 'places', 'sources',
|
||||
'citations', 'media', 'repositories', 'notes', 'tags'):
|
||||
total += self.method("get_number_of_%s", tbl)()
|
||||
UpdateCallback.__init__(self, callback)
|
||||
self.set_total(total)
|
||||
primary_table = (
|
||||
(self.get_person_cursor, Person),
|
||||
(self.get_family_cursor, Family),
|
||||
@ -819,8 +848,8 @@ class DBAPI(DbGeneric):
|
||||
obj.__class__.__name__,
|
||||
ref_handle,
|
||||
ref_class_name])
|
||||
self.update()
|
||||
self._txn_commit()
|
||||
callback(5)
|
||||
|
||||
def rebuild_secondary(self, callback=None):
|
||||
"""
|
||||
@ -829,26 +858,26 @@ class DBAPI(DbGeneric):
|
||||
if self.readonly:
|
||||
return
|
||||
|
||||
total = 0
|
||||
for tbl in ('people', 'families', 'events', 'places', 'sources',
|
||||
'citations', 'media', 'repositories', 'notes', 'tags'):
|
||||
total += self.method("get_number_of_%s", tbl)()
|
||||
UpdateCallback.__init__(self, callback)
|
||||
self.set_total(total)
|
||||
|
||||
# First, expand blob to individual fields:
|
||||
self._txn_begin()
|
||||
index = 1
|
||||
for obj_type in ('Person', 'Family', 'Event', 'Place', 'Repository',
|
||||
'Source', 'Citation', 'Media', 'Note', 'Tag'):
|
||||
for handle in self.method('get_%s_handles', obj_type)():
|
||||
obj = self.method('get_%s_from_handle', obj_type)(handle)
|
||||
self._update_secondary_values(obj)
|
||||
if callback:
|
||||
callback(index)
|
||||
index += 1
|
||||
self.update()
|
||||
self._txn_commit()
|
||||
if callback:
|
||||
callback(11)
|
||||
|
||||
# Next, rebuild stats:
|
||||
gstats = self.get_gender_stats()
|
||||
self.genderStats = GenderStats(gstats)
|
||||
if callback:
|
||||
callback(12)
|
||||
|
||||
def _has_handle(self, obj_key, handle):
|
||||
table = KEY_TO_NAME_MAP[obj_key]
|
||||
|
@ -361,7 +361,7 @@ class DbTestClassBase(object):
|
||||
msg="Callback Manager disconnect cb check")
|
||||
|
||||
|
||||
params = [('BsdDb', 'bsddb'), ('SQLite', 'sqlite')]
|
||||
params = [('SQLite', 'sqlite')]
|
||||
|
||||
for name, param in params:
|
||||
cls_name = "TestMyTestClass_%s" % (name, )
|
||||
|
@ -207,16 +207,6 @@ class ExportControl(unittest.TestCase):
|
||||
call("-y", "-q", "--remove", TREE_NAME)
|
||||
|
||||
def test_csv(self):
|
||||
""" Run a csv export test """
|
||||
set_format(0) # Use ISO date for test
|
||||
config.set('database.backend', 'bsddb')
|
||||
src_file = 'exp_sample_csv.gramps'
|
||||
tst_file = 'exp_sample_csv.csv'
|
||||
msg = do_it(src_file, tst_file)
|
||||
if msg:
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_csv_sq(self):
|
||||
""" Run a csv export test """
|
||||
set_format(0) # Use ISO date for test
|
||||
config.set('database.backend', 'sqlite')
|
||||
@ -227,16 +217,6 @@ class ExportControl(unittest.TestCase):
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_ged(self):
|
||||
""" Run a Gedcom export test """
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'bsddb')
|
||||
src_file = 'exp_sample.gramps'
|
||||
tst_file = 'exp_sample_ged.ged'
|
||||
msg = do_it(src_file, tst_file, gedfilt)
|
||||
if msg:
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_ged_sq(self):
|
||||
""" Run a Gedcom export test """
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'sqlite')
|
||||
@ -247,16 +227,6 @@ class ExportControl(unittest.TestCase):
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_vcard(self):
|
||||
""" Run a vcard export test """
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'bsddb')
|
||||
src_file = 'exp_sample.gramps'
|
||||
tst_file = 'exp_sample.vcf'
|
||||
msg = do_it(src_file, tst_file, vcffilt)
|
||||
if msg:
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_vcard_sq(self):
|
||||
""" Run a vcard export test """
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'sqlite')
|
||||
@ -268,17 +238,6 @@ class ExportControl(unittest.TestCase):
|
||||
|
||||
@patch('gramps.plugins.export.exportvcalendar.time.localtime', mock_localtime)
|
||||
def test_vcs(self):
|
||||
""" Run a Vcalandar export test """
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'bsddb')
|
||||
src_file = 'exp_sample.gramps'
|
||||
tst_file = 'exp_sample.vcs'
|
||||
msg = do_it(src_file, tst_file)
|
||||
if msg:
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
@patch('gramps.plugins.export.exportvcalendar.time.localtime', mock_localtime)
|
||||
def test_vcs_sq(self):
|
||||
""" Run a Vcalandar export test """
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'sqlite')
|
||||
@ -289,16 +248,6 @@ class ExportControl(unittest.TestCase):
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_gw(self):
|
||||
""" Run a Geneweb export test """
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'bsddb')
|
||||
src_file = 'exp_sample.gramps'
|
||||
tst_file = 'exp_sample.gw'
|
||||
msg = do_it(src_file, tst_file)
|
||||
if msg:
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_gw_sq(self):
|
||||
""" Run a Geneweb export test """
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'sqlite')
|
||||
@ -309,17 +258,6 @@ class ExportControl(unittest.TestCase):
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_wft(self):
|
||||
""" Run a Web Family Tree export test """
|
||||
set_format(0) # Use ISO date for test
|
||||
config.set('preferences.place-auto', True)
|
||||
config.set('database.backend', 'bsddb')
|
||||
src_file = 'exp_sample.gramps'
|
||||
tst_file = 'exp_sample.wft'
|
||||
msg = do_it(src_file, tst_file)
|
||||
if msg:
|
||||
self.fail(tst_file + ': ' + msg)
|
||||
|
||||
def test_wft_sq(self):
|
||||
""" Run a Web Family Tree export test """
|
||||
set_format(0) # Use ISO date for test
|
||||
config.set('preferences.place-auto', True)
|
||||
|
@ -199,18 +199,16 @@ def make_tst_function(tstfile, file_name):
|
||||
"""
|
||||
|
||||
@patch('gramps.plugins.db.dbapi.dbapi.time')
|
||||
@patch('gramps.plugins.db.bsddb.write.time')
|
||||
@patch('gramps.gen.utils.unknown.localtime')
|
||||
@patch('gramps.gen.utils.unknown.time')
|
||||
@patch('time.localtime')
|
||||
def tst(self, mockptime, mocktime, mockltime, mockwtime, mockdtime):
|
||||
def tst(self, mockptime, mocktime, mockltime, mockdtime):
|
||||
""" This compares the import file with the expected result '.gramps'
|
||||
file.
|
||||
"""
|
||||
mockptime.side_effect = mock_localtime
|
||||
mocktime.side_effect = mock_time
|
||||
mockltime.side_effect = mock_localtime
|
||||
mockwtime.side_effect = mock_time
|
||||
mockdtime.side_effect = mock_time
|
||||
fn1 = os.path.join(TEST_DIR, tstfile)
|
||||
fn2 = os.path.join(TEST_DIR, (file_name + ".gramps"))
|
||||
|
@ -77,7 +77,7 @@ class ToolControl(unittest.TestCase):
|
||||
"""
|
||||
def setUp(self):
|
||||
self.db_backend = config.get('database.backend')
|
||||
call("--config=database.backend:bsddb", "-y", "-q", "--remove", TREE_NAME)
|
||||
call("--config=database.backend:sqlite", "-y", "-q", "--remove", TREE_NAME)
|
||||
|
||||
def tearDown(self):
|
||||
config.set('database.backend', self.db_backend)
|
||||
|
@ -51,14 +51,13 @@ log = logging.getLogger(".Rebuild")
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gui.plug import tool
|
||||
from gramps.gui.dialog import OkDialog
|
||||
from gramps.gen.updatecallback import UpdateCallback
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# runTool
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
class Rebuild(tool.Tool, UpdateCallback):
|
||||
class Rebuild(tool.Tool):
|
||||
|
||||
def __init__(self, dbstate, user, options_class, name, callback=None):
|
||||
uistate = user.uistate
|
||||
@ -76,10 +75,7 @@ class Rebuild(tool.Tool, UpdateCallback):
|
||||
uistate.progress.show()
|
||||
uistate.push_message(dbstate, _("Rebuilding secondary indexes..."))
|
||||
|
||||
UpdateCallback.__init__(self, self.callback)
|
||||
self.set_total(12)
|
||||
self.db.rebuild_secondary(self.update)
|
||||
self.reset()
|
||||
self.db.rebuild_secondary(self.callback)
|
||||
|
||||
uistate.set_busy_cursor(False)
|
||||
uistate.progress.hide()
|
||||
@ -88,7 +84,7 @@ class Rebuild(tool.Tool, UpdateCallback):
|
||||
parent=uistate.window)
|
||||
else:
|
||||
print("Rebuilding Secondary Indexes...")
|
||||
self.db.rebuild_secondary(self.update_empty)
|
||||
self.db.rebuild_secondary(None)
|
||||
print("All secondary indexes have been rebuilt.")
|
||||
|
||||
self.db.enable_signals()
|
||||
|
@ -53,14 +53,13 @@ log = logging.getLogger(".RebuildRefMap")
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gui.plug import tool
|
||||
from gramps.gui.dialog import OkDialog
|
||||
from gramps.gen.updatecallback import UpdateCallback
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# runTool
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
class RebuildRefMap(tool.Tool, UpdateCallback):
|
||||
class RebuildRefMap(tool.Tool):
|
||||
|
||||
def __init__(self, dbstate, user, options_class, name, callback=None):
|
||||
uistate = user.uistate
|
||||
@ -80,10 +79,7 @@ class RebuildRefMap(tool.Tool, UpdateCallback):
|
||||
self.callback = None
|
||||
print(_("Rebuilding reference maps..."))
|
||||
|
||||
UpdateCallback.__init__(self, self.callback)
|
||||
self.set_total(6)
|
||||
self.db.reindex_reference_map(self.update)
|
||||
self.reset()
|
||||
self.db.reindex_reference_map(self.callback)
|
||||
|
||||
if uistate:
|
||||
uistate.set_busy_cursor(False)
|
||||
|
Loading…
x
Reference in New Issue
Block a user