From b52c118673292b21a63613489c966eea8f5bfa59 Mon Sep 17 00:00:00 2001 From: prculley Date: Fri, 31 Jan 2020 08:10:28 -0600 Subject: [PATCH] Deprecate BSDDB, but allow to be loaded with convert to sqlite --- gramps/cli/grampscli.py | 37 +- gramps/gen/db/exceptions.py | 223 +- gramps/gen/db/generic.py | 62 +- .../{plugins/db/bsddb => gen/db}/upgrade.py | 1000 ++++--- gramps/gui/dbloader.py | 125 +- gramps/plugins/db/bsddb/__init__.py | 75 - gramps/plugins/db/bsddb/bsddb.gpr.py | 2 +- gramps/plugins/db/bsddb/bsddb.py | 244 +- gramps/plugins/db/bsddb/bsddbtxn.py | 239 -- gramps/plugins/db/bsddb/cursor.py | 134 - gramps/plugins/db/bsddb/read.py | 1948 -------------- gramps/plugins/db/bsddb/summary.py | 85 - gramps/plugins/db/bsddb/test/__init__.py | 0 gramps/plugins/db/bsddb/test/cursor_test.py | 228 -- gramps/plugins/db/bsddb/test/db_test.py | 257 -- .../plugins/db/bsddb/test/grampsdbtestbase.py | 164 -- .../db/bsddb/test/reference_map_test.py | 219 -- gramps/plugins/db/bsddb/undoredo.py | 548 ---- gramps/plugins/db/bsddb/write.py | 2320 ----------------- gramps/plugins/db/dbapi/dbapi.py | 23 + 20 files changed, 852 insertions(+), 7081 deletions(-) rename gramps/{plugins/db/bsddb => gen/db}/upgrade.py (60%) delete mode 100644 gramps/plugins/db/bsddb/__init__.py delete mode 100644 gramps/plugins/db/bsddb/bsddbtxn.py delete mode 100644 gramps/plugins/db/bsddb/cursor.py delete mode 100644 gramps/plugins/db/bsddb/read.py delete mode 100644 gramps/plugins/db/bsddb/summary.py delete mode 100644 gramps/plugins/db/bsddb/test/__init__.py delete mode 100644 gramps/plugins/db/bsddb/test/cursor_test.py delete mode 100644 gramps/plugins/db/bsddb/test/db_test.py delete mode 100644 gramps/plugins/db/bsddb/test/grampsdbtestbase.py delete mode 100644 gramps/plugins/db/bsddb/test/reference_map_test.py delete mode 100644 gramps/plugins/db/bsddb/undoredo.py delete mode 100644 gramps/plugins/db/bsddb/write.py diff --git a/gramps/cli/grampscli.py b/gramps/cli/grampscli.py index 
fb7e2454d..03565f3d2 100644 --- a/gramps/cli/grampscli.py +++ b/gramps/cli/grampscli.py @@ -52,14 +52,9 @@ from gramps.gen.db.utils import make_database from gramps.gen.errors import DbError from gramps.gen.dbstate import DbState from gramps.gen.db.exceptions import (DbUpgradeRequiredError, - BsddbDowngradeError, + DbSupportedError, DbVersionError, DbPythonError, - DbEnvironmentError, - BsddbUpgradeRequiredError, - BsddbDowngradeRequiredError, - PythonUpgradeRequiredError, - PythonDowngradeError, DbConnectionError) from gramps.gen.plug import BasePluginManager from gramps.gen.utils.config import get_researcher @@ -176,34 +171,8 @@ class CLIDbLoader: try: self.dbstate.db.load(filename, self._pulse_progress, mode, username=username, password=password) - except DbEnvironmentError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except BsddbUpgradeRequiredError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except BsddbDowngradeRequiredError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except BsddbDowngradeError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except DbUpgradeRequiredError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except PythonDowngradeError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except PythonUpgradeRequiredError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except DbVersionError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except DbPythonError as msg: - self.dbstate.no_database() - self._errordialog(_("Cannot open database"), str(msg)) - except DbConnectionError as msg: + except (DbConnectionError, DbSupportedError, DbUpgradeRequiredError, + 
DbVersionError, DbPythonError) as msg: self.dbstate.no_database() self._errordialog(_("Cannot open database"), str(msg)) except OSError as msg: diff --git a/gramps/gen/db/exceptions.py b/gramps/gen/db/exceptions.py index 9fb2ee346..ab23ca9d7 100644 --- a/gramps/gen/db/exceptions.py +++ b/gramps/gen/db/exceptions.py @@ -122,121 +122,6 @@ class DbPythonError(Exception): 'min_vers': self.min_vers, 'max_vers': self.max_vers} -class BsddbDowngradeError(Exception): - """ - Error used to report that the Berkeley database used to create the family - tree is of a version that is too new to be supported by the current version. - """ - def __init__(self, env_version, bdb_version): - Exception.__init__(self) - self.env_version = str(env_version) - self.bdb_version = str(bdb_version) - - def __str__(self): - return _('The Family Tree you are trying to load is in the Bsddb ' - 'version %(env_version)s format. This version of Gramps uses ' - 'Bsddb version %(bdb_version)s. So you are trying to load ' - 'data created in a newer format into an older program, and ' - 'this is bound to fail.\n\n' - 'You should start your ' - '%(bold_start)snewer%(bold_end)s version of Gramps and ' - '%(wiki_backup_html_start)smake a backup%(html_end)s ' - 'of your Family Tree. You can then import ' - 'this backup into this version of Gramps.') % { - 'wiki_backup_html_start' : URL_BACKUP1_START , - 'html_end' : '' , - 'bold_start' : '' , - 'bold_end' : '' , - 'env_version' : self.env_version, - 'bdb_version' : self.bdb_version } - -class BsddbDowngradeRequiredError(Exception): - """ - Error used to report that the Berkeley database used to create the family - tree is of a version that is newer than the current version, but it may be - possible to open the tree, because the difference is only a point upgrade - (i.e. a difference in the last digit of the version tuple). 
- """ - def __init__(self, env_version, bdb_version): - Exception.__init__(self) - self.env_version = str(env_version) - self.bdb_version = str(bdb_version) - - def __str__(self): - return _('The Family Tree you are trying to load is in the Bsddb ' - 'version %(env_version)s format. This version of Gramps uses ' - 'Bsddb version %(bdb_version)s. So you are trying to load ' - 'data created in a newer format into an older program. In ' - 'this particular case, the difference is very small, so it ' - 'may work.\n\n' - 'If you have not already made a backup of your Family Tree, ' - 'then you should start your ' - '%(bold_start)snewer%(bold_end)s version of Gramps and ' - '%(wiki_backup_html_start)smake a backup%(html_end)s ' - 'of your Family Tree.') % { - 'wiki_backup_html_start' : URL_BACKUP1_START , - 'html_end' : '' , - 'bold_start' : '' , - 'bold_end' : '' , - 'env_version' : self.env_version, - 'bdb_version' : self.bdb_version } - -class BsddbUpgradeRequiredError(Exception): - """ - Error used to report that the Berkeley database used to create the family - tree is of a version that is too new to be supported by the current version. - """ - def __init__(self, env_version, bsddb_version): - Exception.__init__(self) - self.env_version = str(env_version) - self.bsddb_version = str(bsddb_version) - - def __str__(self): - return _('The Family Tree you are trying to load is in the Bsddb ' - 'version %(env_version)s format. This version of Gramps uses ' - 'Bsddb version %(bdb_version)s. Therefore you cannot load ' - 'this Family Tree without upgrading the Bsddb version of the ' - 'Family Tree.\n\n' - 'Opening the Family Tree with this version of Gramps might ' - 'irretrievably corrupt your Family Tree. 
You are strongly ' - 'advised to backup your Family Tree.\n\n' - 'If you have not already made a backup of your Family Tree, ' - 'then you should start your %(bold_start)sold%(bold_end)s ' - 'version of Gramps and ' - '%(wiki_backup_html_start)smake a backup%(html_end)s ' - 'of your Family Tree.') % { - 'wiki_backup_html_start' : URL_BACKUP1_START , - 'html_end' : '' , - 'bold_start' : '' , - 'bold_end' : '' , - 'env_version' : self.env_version, - 'bdb_version' : self.bsddb_version } - -class DbEnvironmentError(Exception): - """ - Error used to report that the database 'environment' could not be opened. - Most likely, the database was created by a different version of the underlying database engine. - """ - def __init__(self, msg): - Exception.__init__(self) - self.msg = msg - - def __str__(self): - return (_("Gramps has detected a problem in opening the 'environment' " - "of the underlying Berkeley database used to store this " - "Family Tree. The most likely cause " - "is that the database was created with an old version " - "of the Berkeley database program, " - "and you are now using a new version. " - "It is quite likely that your database has not been " - "changed by Gramps.\nIf possible, you should revert to your " - "old version of Gramps and its support software; export " - "your database to XML; close the database; " - "then upgrade again " - "to this version of Gramps and import the XML file " - "in an empty Family Tree. Alternatively, it may be possible " - "to use the Berkeley database recovery tools.") - + '\n\n' + str(self.msg)) class DbUpgradeRequiredError(Exception): """ @@ -275,73 +160,6 @@ class DbUpgradeRequiredError(Exception): 'oldschema' : self.oldschema, 'newschema' : self.newschema } -class PythonDowngradeError(Exception): - """ - Error used to report that the Python version used to create the family tree - (i.e. Python3) is a version that is newer than the current version - (i.e. 
Python2), so the Family Tree cannot be opened - """ - def __init__(self, db_python_version, current_python_version): - Exception.__init__(self) - self.db_python_version = str(db_python_version) - self.current_python_version = str(current_python_version) - - def __str__(self): - return _('The Family Tree you are trying to load was created with ' - 'Python version %(db_python_version)s. This version of Gramps ' - 'uses Python version %(current_python_version)s. So you are ' - 'trying to load ' - 'data created in a newer format into an older program, and ' - 'this is bound to fail.\n\n' - 'You should start your ' - '%(bold_start)snewer%(bold_end)s version of Gramps and ' - '%(wiki_backup_html_start)smake a backup%(html_end)s ' - 'of your Family Tree. You can then import ' - 'this backup into this version of Gramps.') % { - 'wiki_backup_html_start' : URL_BACKUP1_START , - 'html_end' : '' , - 'bold_start' : '' , - 'bold_end' : '' , - 'db_python_version': self.db_python_version, - 'current_python_version': self.current_python_version } - -class PythonUpgradeRequiredError(Exception): - """ - Error used to report that the Python version used to create the family tree - (i.e. Python2) is earlier than the current Python version (i.e. Python3), so - the Family Tree needs to be upgraded. - """ - def __init__(self, db_python_version, current_python_version): - Exception.__init__(self) - self.db_python_version = str(db_python_version) - self.current_python_version = str(current_python_version) - - def __str__(self): - return _('The Family Tree you are trying to load is in the Python ' - 'version %(db_python_version)s format. This version of Gramps ' - 'uses Python version %(current_python_version)s. 
Therefore ' - 'you cannot load this Family Tree without upgrading the ' - 'Python version of the Family Tree.\n\n' - 'If you upgrade then you won\'t be able to use the previous ' - 'version of Gramps, even if you subsequently ' - '%(wiki_manual_backup_html_start)sbackup%(html_end)s or ' - '%(wiki_manual_export_html_start)sexport%(html_end)s ' - 'your upgraded Family Tree.\n\n' - 'Upgrading is a difficult task which could irretrievably ' - 'corrupt your Family Tree if it is interrupted or fails.\n\n' - 'If you have not already made a backup of your Family Tree, ' - 'then you should start your %(bold_start)sold%(bold_end)s ' - 'version of Gramps and ' - '%(wiki_backup_html_start)smake a backup%(html_end)s ' - 'of your Family Tree.') % { - 'wiki_backup_html_start' : URL_BACKUP1_START , - 'wiki_manual_backup_html_start' : URL_BACKUP2_START , - 'wiki_manual_export_html_start' : URL_EXPORT_START , - 'html_end' : '' , - 'bold_start' : '' , - 'bold_end' : '' , - 'db_python_version': self.db_python_version, - 'current_python_version': self.current_python_version } class DbConnectionError(Exception): """ @@ -360,6 +178,35 @@ class DbConnectionError(Exception): 'message': self.msg, 'settings_file': self.settings_file} + +class DbSupportedError(Exception): + """ + Error used to report that a database is no longer supported. 
+ """ + def __init__(self, msg): + Exception.__init__(self) + self.msg = msg + + def __str__(self): + return _('The Family Tree you are trying to load is in the %(dbtype)s ' + 'database, which is no longer supported.\nTherefore you ' + 'cannot load this Family Tree without upgrading.\n\n' + 'If you upgrade then you won\'t be able to use the previous ' + 'version of Gramps, even if you subsequently ' + '%(wiki_manual_backup_html_start)sbackup%(html_end)s or ' + '%(wiki_manual_export_html_start)sexport%(html_end)s ' + 'your upgraded Family Tree.\n\n' + 'You are strongly advised to backup your Family Tree.\n\n' + 'If you have not already made a backup of your Family Tree, ' + 'then you should start your previous version of Gramps and ' + '%(wiki_backup_html_start)smake a backup%(html_end)s ' + 'of your Family Tree.') % { + 'dbtype' : self.msg, + 'wiki_manual_backup_html_start' : URL_BACKUP2_START , + 'wiki_manual_export_html_start' : URL_EXPORT_START , + 'wiki_backup_html_start' : URL_BACKUP1_START , + 'html_end' : ''} + if __name__ == "__main__": """ Call this from the CLI (in order to find the imported modules): @@ -370,20 +217,8 @@ if __name__ == "__main__": print("\nDbVersionError:\n", DbVersionError('1.6.0', '1.5.0', '1.5.1')) - print("\nBsddbDowngradeError:\n", - BsddbDowngradeError('4.8.30', '4.8.29')) - print("\nBsddbDowngradeRequiredError:\n", - BsddbDowngradeRequiredError('4.8.30', '4.8.29')) - print("\nBsddbUpgradeRequiredError:\n", - BsddbUpgradeRequiredError('4.8.29', '4.8.30')) - print("\nDbEnvironmentError:\n", - DbEnvironmentError('test message')) print("\nDbUpgradeRequiredError:\n", DbUpgradeRequiredError('1.5.1', '1.6.0')) - print("\nPythonDowngradeError:\n", - PythonDowngradeError('3', '2')) - print("\nPythonUpgradeRequiredError:\n", - PythonUpgradeRequiredError('2', '3')) sys.exit(0) print("\nxxx:\n", diff --git a/gramps/gen/db/generic.py b/gramps/gen/db/generic.py index ba445c9d8..da6b0e97a 100644 --- a/gramps/gen/db/generic.py +++ 
b/gramps/gen/db/generic.py @@ -47,6 +47,7 @@ from . import (DbReadBase, DbWriteBase, DbUndo, DBLOGNAME, DBUNDOFN, REPOSITORY_KEY, NOTE_KEY, TAG_KEY, TXNADD, TXNUPD, TXNDEL, KEY_TO_NAME_MAP, DBMODE_R, DBMODE_W) from .utils import write_lock_file, clear_lock_file +from .exceptions import DbVersionError, DbUpgradeRequiredError from ..errors import HandleError from ..utils.callback import Callback from ..updatecallback import UpdateCallback @@ -311,7 +312,7 @@ class DbGeneric(DbWriteBase, DbReadBase, UpdateCallback, Callback): __callback_map = {} - VERSION = (18, 0, 0) + VERSION = (20, 0, 0) def __init__(self, directory=None): DbReadBase.__init__(self) @@ -659,6 +660,21 @@ class DbGeneric(DbWriteBase, DbReadBase, UpdateCallback, Callback): self.db_is_open = True + # Check on db version to see if we need upgrade or too new + dbversion = int(self._get_metadata('version', default='0')) + if dbversion > self.VERSION[0]: + self.close() + raise DbVersionError(dbversion, 18, self.VERSION[0]) + + if not self.readonly and dbversion < self.VERSION[0]: + LOG.debug("Schema upgrade required from %s to %s", + dbversion, self.VERSION[0]) + if force_schema_upgrade: + self._gramps_upgrade(dbversion, directory, callback) + else: + self.close() + raise DbUpgradeRequiredError(dbversion, self.VERSION[0]) + def _close(self): """ Close database backend. @@ -2463,3 +2479,47 @@ class DbGeneric(DbWriteBase, DbReadBase, UpdateCallback, Callback): enclosed_by = placeref.ref break return enclosed_by + + def _gramps_upgrade(self, version, directory, callback=None): + """ + Here we do the calls for stepwise schema upgrades. + We assume that we need to rebuild secondary and reference maps. 
+ UpdateCallback.__init__(self, callback) + + start = time.time() + + from gramps.gen.db.upgrade import ( + gramps_upgrade_14, gramps_upgrade_15, gramps_upgrade_16, + gramps_upgrade_17, gramps_upgrade_18, gramps_upgrade_19, + gramps_upgrade_20) + + if version < 14: + gramps_upgrade_14(self) + if version < 15: + gramps_upgrade_15(self) + if version < 16: + gramps_upgrade_16(self) + if version < 17: + gramps_upgrade_17(self) + if version < 18: + gramps_upgrade_18(self) + if version < 19: + gramps_upgrade_19(self) + if version < 20: + gramps_upgrade_20(self) + + self.rebuild_secondary() + self.reindex_reference_map(self.update) + self.reset() + + self.set_schema_version(self.VERSION[0]) + LOG.debug("Upgrade time: %d seconds" % int(time.time() - start)) + + def get_schema_version(self): + """ Return current schema version as an int """ + return int(self._get_metadata('version', default='0')) + + def set_schema_version(self, value): + """ set the current schema version """ + self._set_metadata('version', str(value)) diff --git a/gramps/plugins/db/bsddb/upgrade.py b/gramps/gen/db/upgrade.py similarity index 60% rename from gramps/plugins/db/bsddb/upgrade.py rename to gramps/gen/db/upgrade.py index 48fd189dd..24464e5b9 100644 --- a/gramps/plugins/db/bsddb/upgrade.py +++ b/gramps/gen/db/upgrade.py @@ -1,8 +1,8 @@ # # Gramps - a GTK+/GNOME based genealogy program # -# Copyright (C) 2004-2006 Donald N. Allingham -# Copyright (C) 2011 Tim G L Lyons +# Copyright (C) 2016-2020 Gramps Development Team +# Copyright (C) 2020 Paul Culley # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -18,89 +18,72 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# - -""" -methods to upgrade a database from version 13 to current version -""" - -#------------------------------------------------------------------------- +""" Generic upgrade module for dbapi dbs """ +#------------------------------------------------------------------------ # -# Standard python modules +# Python Modules # -#------------------------------------------------------------------------- +#------------------------------------------------------------------------ import os import re import time import logging -from bsddb3 import db - -#------------------------------------------------------------------------- +#------------------------------------------------------------------------ # -# Gramps modules +# Gramps Modules # -#------------------------------------------------------------------------- -from gramps.gen.const import GRAMPS_LOCALE as glocale -_ = glocale.translation.gettext -from gramps.gen.lib.markertype import MarkerType -from gramps.gen.lib.nameorigintype import NameOriginType -from gramps.gen.lib.eventtype import EventType -from gramps.gen.lib.tag import Tag +#------------------------------------------------------------------------ +from gramps.cli.clidbman import NAME_FILE +from gramps.gen.lib import EventType, NameOriginType, Tag, MarkerType from gramps.gen.utils.file import create_checksum from gramps.gen.utils.id import create_id -from . import BSDDBTxn -from .write import _mkname, SURNAMES -from gramps.gen.db.dbconst import (PERSON_KEY, FAMILY_KEY, EVENT_KEY, - MEDIA_KEY, PLACE_KEY, REPOSITORY_KEY, - SOURCE_KEY) from gramps.gui.dialog import (InfoDialog) +from .dbconst import (PERSON_KEY, FAMILY_KEY, EVENT_KEY, MEDIA_KEY, PLACE_KEY, + REPOSITORY_KEY, CITATION_KEY, SOURCE_KEY, NOTE_KEY, + TAG_KEY) +from ..const import GRAMPS_LOCALE as glocale +_ = glocale.translation.gettext LOG = logging.getLogger(".upgrade") -def gramps_upgrade_pickle(self): - """ - Upgrade to python3 pickle protocol. 
- """ - import pickle - tables = (self.person_map, self.event_map, self.family_map, self.place_map, - self.repository_map, self.source_map, self.citation_map, - self.media_map, self.note_map, self.tag_map, self.metadata, - self.reference_map) - self.set_total(sum(map(len, tables))) - for data_map in tables: - for handle in data_map.keys(): - raw = data_map.db.get(handle) - data = pickle.loads(raw, encoding='utf-8') - with BSDDBTxn(self.env, data_map) as txn: - txn.put(handle, data) - self.update() - with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'upgraded', 'Yes') +def gramps_upgrade_20(self): + """ + Placeholder update. + """ + length = 0 + self.set_total(length) + self._txn_begin() + + # uid and place upgrade code goes here + + self._txn_commit() + # Bump up database version. Separate transaction to save metadata. + self._set_metadata('version', 20) + def gramps_upgrade_19(self): """ Upgrade database from version 18 to 19. """ - default_handle = self.metadata.get(b'default') - with BSDDBTxn(self.env, self.metadata) as txn: - if isinstance(default_handle, bytes): - default_handle = default_handle.decode('utf-8') - txn.put(b'default', default_handle) - txn.put(b'version', 19) + # This is done in the conversion from bsddb, so just say we did it. + self._set_metadata('version', 19) + def gramps_upgrade_18(self): """ Upgrade database from version 17 to 18. """ - length = len(self.place_map) + length = self.get_number_of_places() self.set_total(length) + self._txn_begin() # --------------------------------- # Modify Place # --------------------------------- # Convert name fields to use PlaceName. 
- for handle in self.place_map.keys(): - place = self.place_map[handle] + for handle in self.get_place_handles(): + place = self.get_raw_place_data(handle) new_place = list(place) new_place[6] = (new_place[6], None, '') alt_names = [] @@ -108,15 +91,13 @@ def gramps_upgrade_18(self): alt_names.append((name, None, '')) new_place[7] = alt_names new_place = tuple(new_place) - with BSDDBTxn(self.env, self.place_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_place) + self._commit_raw(new_place, PLACE_KEY) self.update() + self._txn_commit() # Bump up database version. Separate transaction to save metadata. - with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'version', 18) + self._set_metadata('version', 18) + def gramps_upgrade_17(self): """ @@ -129,18 +110,19 @@ def gramps_upgrade_17(self): 4. Add checksum field to media objects. 5. Rebuild list of custom events. """ - length = (len(self.event_map) + len(self.place_map) + - len(self.repository_map) + len(self.source_map) + - len(self.citation_map) + len(self.media_map)) + length = (self.get_number_of_events() + self.get_number_of_places() + + self.get_number_of_citations() + self.get_number_of_sources() + + self.get_number_of_repositories() + self.get_number_of_media()) self.set_total(length) + self._txn_begin() # --------------------------------- # Modify Event # --------------------------------- # Add new tag_list field. 
self.event_names = set() - for handle in self.event_map.keys(): - event = self.event_map[handle] + for handle in self.get_event_handles(): + event = self.get_raw_event_data(handle) new_event = list(event) event_type = EventType() event_type.unserialize(new_event[2]) @@ -148,10 +130,7 @@ def gramps_upgrade_17(self): self.event_names.add(str(event_type)) new_event = new_event[:11] + [[]] + new_event[11:] new_event = tuple(new_event) - with BSDDBTxn(self.env, self.event_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_event) + self._commit_raw(new_event, EVENT_KEY) self.update() # --------------------------------- @@ -161,8 +140,8 @@ def gramps_upgrade_17(self): locations = {} self.max_id = 0 index = re.compile('[0-9]+') - for handle in self.place_map.keys(): - place = self.place_map[handle] + for handle in self.get_place_handles(): + place = self.get_raw_place_data(handle) match = index.search(place[1]) if match: if self.max_id <= int(match.group(0)): @@ -170,8 +149,8 @@ def gramps_upgrade_17(self): if place[5] is not None: locations[get_location(place[5])] = handle - for handle in list(self.place_map.keys()): - place = self.place_map[handle] + for handle in list(self.get_place_handles()): + place = self.get_raw_place_data(handle) new_place = list(place) zip_code = '' @@ -203,7 +182,8 @@ def gramps_upgrade_17(self): if loc[n]: # TODO for Arabic, should the next line's comma be translated? 
title = ', '.join([item for item in loc[n:] if item]) - parent_handle = add_place(self, loc[n], n, parent_handle, title) + parent_handle = add_place( + self, loc[n], n, parent_handle, title) locations[tuple([''] * n + loc[n:])] = parent_handle n -= 1 @@ -217,102 +197,85 @@ def gramps_upgrade_17(self): else: name = new_place[2] type_num = -1 - new_place = new_place[:5] + [placeref_list, name, [], - (type_num, ''), zip_code] + \ - new_place[6:12] + [[]] + new_place[12:] + new_place = (new_place[:5] + [ + placeref_list, name, [], (type_num, ''), zip_code] + + new_place[6:12] + [[]] + new_place[12:]) new_place = tuple(new_place) - with BSDDBTxn(self.env, self.place_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_place) + self._commit_raw(new_place, PLACE_KEY) self.update() # --------------------------------- # Modify Repository # --------------------------------- # Add new tag_list field. - for handle in self.repository_map.keys(): - repository = self.repository_map[handle] + for handle in self.get_repository_handles(): + repository = self.get_raw_repository_data(handle) new_repository = list(repository) new_repository = new_repository[:8] + [[]] + new_repository[8:] new_repository = tuple(new_repository) - with BSDDBTxn(self.env, self.repository_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_repository) + self._commit_raw(new_repository, REPOSITORY_KEY) self.update() # --------------------------------- # Modify Source # --------------------------------- # Add new tag_list field. 
- for handle in self.source_map.keys(): - source = self.source_map[handle] + for handle in self.get_source_handles(): + source = self.get_raw_source_data(handle) new_source = list(source) new_source = new_source[:11] + [[]] + new_source[11:] new_source = tuple(new_source) - with BSDDBTxn(self.env, self.source_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_source) + self._commit_raw(new_source, SOURCE_KEY) self.update() # --------------------------------- # Modify Citation # --------------------------------- # Add new tag_list field. - for handle in self.citation_map.keys(): - citation = self.citation_map[handle] + for handle in self.get_citation_handles(): + citation = self.get_raw_citation_data(handle) new_citation = list(citation) new_citation = new_citation[:10] + [[]] + new_citation[10:] new_citation = tuple(new_citation) - with BSDDBTxn(self.env, self.citation_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_citation) + self._commit_raw(new_citation, CITATION_KEY) self.update() # ------------------------------------------------------- # Upgrade Source and Citation datamap to SrcAttributeBase # ------------------------------------------------------- - for handle in self.source_map.keys(): - source = self.source_map[handle] + for handle in self.get_source_handles(): + source = self.get_raw_source_data(handle) (handle, gramps_id, title, author, pubinfo, notelist, medialist, abbrev, change, datamap, reporef_list, taglist, private) = source srcattributelist = upgrade_datamap_17(datamap) new_source = (handle, gramps_id, title, author, pubinfo, - notelist, medialist, abbrev, change, srcattributelist, reporef_list, - taglist, private) - with BSDDBTxn(self.env, self.source_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_source) + notelist, medialist, abbrev, change, srcattributelist, + reporef_list, taglist, private) + 
self._commit_raw(new_source, SOURCE_KEY) self.update() - for handle in self.citation_map.keys(): - citation = self.citation_map[handle] + for handle in self.get_citation_handles(): + citation = self.get_raw_citation_data(handle) (handle, gramps_id, datelist, page, confidence, source_handle, notelist, medialist, datamap, change, taglist, private) = citation srcattributelist = upgrade_datamap_17(datamap) - new_citation = (handle, gramps_id, datelist, page, confidence, source_handle, - notelist, medialist, srcattributelist, change, taglist, private) - with BSDDBTxn(self.env, self.citation_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_citation) + new_citation = (handle, gramps_id, datelist, page, confidence, + source_handle, notelist, medialist, srcattributelist, + change, taglist, private) + self._commit_raw(new_citation, CITATION_KEY) self.update() # --------------------------------- # Modify Media # --------------------------------- # Add new checksum field. - base_path = self.metadata.get(b'mediapath') - if base_path is None: + base_path = self._get_metadata('media-path') + if not base_path: # Check that the mediapath is not set to None (bug #7844). base_path = '' - for handle in self.media_map.keys(): - media = self.media_map[handle] + for handle in self.get_media_handles(): + media = self.get_raw_media_data(handle) new_media = list(media) if os.path.isabs(new_media[2]): full_path = new_media[2] @@ -321,15 +284,13 @@ def gramps_upgrade_17(self): checksum = create_checksum(full_path) new_media = new_media[:5] + [checksum] + new_media[5:] new_media = tuple(new_media) - with BSDDBTxn(self.env, self.media_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_media) + self._commit_raw(new_media, MEDIA_KEY) self.update() + self._txn_commit() # Bump up database version. Separate transaction to save metadata. 
- with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'version', 17) + self._set_metadata('version', 17) + def get_location(loc): # (street, locality, parish, city, county, state, country) @@ -339,27 +300,26 @@ def get_location(loc): location = loc[0][:2] + (loc[1],) + loc[0][2:6] return location + def add_place(self, name, level, parent, title): handle = create_id() self.max_id += 1 gid = self.place_prefix % self.max_id - placetype = (7-level, '') + placetype = (7 - level, '') if parent is not None: placeref_list = [(parent.decode('utf-8'), None)] else: placeref_list = [] place = (handle, gid, title, '', '', placeref_list, name, [], placetype, '', [], [], [], [], [], 0, [], False) - with BSDDBTxn(self.env, self.place_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, place) + self._commit_raw(place, PLACE_KEY) return handle + def upgrade_datamap_17(datamap): """ - In version 16 key value pairs are stored in source and citation. These become - SrcAttribute + In version 16 key value pairs are stored in source and citation. + These become SrcAttribute """ new_srcattr_list = [] private = False @@ -369,6 +329,7 @@ def upgrade_datamap_17(datamap): new_srcattr_list.append((private, the_type, value)) return new_srcattr_list + def gramps_upgrade_16(self): """ Upgrade database from version 15 to 16. This upgrade converts all @@ -388,33 +349,41 @@ def gramps_upgrade_16(self): # Only People, Families, Events, Media Objects, Places, Sources and # Repositories need to be updated, because these are the only primary # objects that can have source citations. 
- length = (len(self.person_map) + - len(self.event_map) + len(self.family_map) + - len(self.repository_map) + len(self.media_map) + - len(self.place_map) + len(self.source_map)) + length = (self.get_number_of_people() + + self.get_number_of_events() + self.get_number_of_families() + + self.get_number_of_repositories() + self.get_number_of_media() + + self.get_number_of_places() + self.get_number_of_sources()) self.set_total(length) + self._txn_begin() # Setup data for upgrade statistics information dialogue keyorder = [PERSON_KEY, FAMILY_KEY, EVENT_KEY, MEDIA_KEY, PLACE_KEY, REPOSITORY_KEY, SOURCE_KEY] key2data = { - PERSON_KEY : 0, - FAMILY_KEY : 1, - EVENT_KEY: 2, - MEDIA_KEY: 3, - PLACE_KEY: 4, - REPOSITORY_KEY: 5, - SOURCE_KEY : 6, - } + PERSON_KEY : 0, + FAMILY_KEY : 1, + EVENT_KEY: 2, + MEDIA_KEY: 3, + PLACE_KEY: 4, + REPOSITORY_KEY: 5, + SOURCE_KEY : 6, + } key2string = { - PERSON_KEY : _('%(n1)6d People upgraded with %(n2)6d citations in %(n3)6d secs\n'), - FAMILY_KEY : _('%(n1)6d Families upgraded with %(n2)6d citations in %(n3)6d secs\n'), - EVENT_KEY : _('%(n1)6d Events upgraded with %(n2)6d citations in %(n3)6d secs\n'), - MEDIA_KEY : _('%(n1)6d Media Objects upgraded with %(n2)6d citations in %(n3)6d secs\n'), - PLACE_KEY : _('%(n1)6d Places upgraded with %(n2)6d citations in %(n3)6d secs\n'), - REPOSITORY_KEY : _('%(n1)6d Repositories upgraded with %(n2)6d citations in %(n3)6d secs\n'), - SOURCE_KEY : _('%(n1)6d Sources upgraded with %(n2)6d citations in %(n3)6d secs\n'), - } + PERSON_KEY : _('%(n1)6d People upgraded with ' + '%(n2)6d citations in %(n3)6d secs\n'), + FAMILY_KEY : _('%(n1)6d Families upgraded with ' + '%(n2)6d citations in %(n3)6d secs\n'), + EVENT_KEY : _('%(n1)6d Events upgraded with ' + '%(n2)6d citations in %(n3)6d secs\n'), + MEDIA_KEY : _('%(n1)6d Media Objects upgraded with ' + '%(n2)6d citations in %(n3)6d secs\n'), + PLACE_KEY : _('%(n1)6d Places upgraded with ' + '%(n2)6d citations in %(n3)6d secs\n'), + REPOSITORY_KEY : 
_('%(n1)6d Repositories upgraded with ' + '%(n2)6d citations in %(n3)6d secs\n'), + SOURCE_KEY : _('%(n1)6d Sources upgraded with ' + '%(n2)6d citations in %(n3)6d secs\n'), + } data_upgradeobject = [0] * 7 # Initialise the citation gramps ID number @@ -425,12 +394,12 @@ def gramps_upgrade_16(self): # --------------------------------- start_num_citations = self.cmap_index start_time = time.time() - for person_handle in self.person_map.keys(): - person = self.person_map[person_handle] + for person_handle in self.get_person_handles(): + person = self.get_raw_person_data(person_handle) try: - # The parameters are evaluated before deciding whether logging is on - # or not. Since the retrieval of names is so complex, I think it is - # safer to protect this with a try except block, even though it + # The parameters are evaluated before deciding whether logging is + # on or not. Since the retrieval of names is so complex, I think it + # is safer to protect this with a try except block, even though it # seems to work for names being present and not. 
LOG.debug("upgrade person %s %s" % (person[3][4], " ".join([name[0] for name in person[3][5]]))) @@ -444,33 +413,27 @@ def gramps_upgrade_16(self): if primary_name: primary_name = upgrade_name_16(self, primary_name) if alternate_names: - alternate_names = upgrade_name_list_16( - self, alternate_names) + alternate_names = upgrade_name_list_16(self, alternate_names) if address_list: - address_list = upgrade_address_list_16( - self, address_list) + address_list = upgrade_address_list_16(self, address_list) if media_list: - media_list = upgrade_media_list_16( - self, media_list) + media_list = upgrade_media_list_16(self, media_list) if attribute_list: - attribute_list = upgrade_attribute_list_16( - self, attribute_list) + attribute_list = upgrade_attribute_list_16(self, attribute_list) if lds_seal_list: - lds_seal_list = upgrade_lds_seal_list_16( - self, lds_seal_list) + lds_seal_list = upgrade_lds_seal_list_16(self, lds_seal_list) if source_list: new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) + self, source_list) else: new_citation_list = [] if person_ref_list: - person_ref_list = upgrade_person_ref_list_16( - self, person_ref_list) + person_ref_list = upgrade_person_ref_list_16(self, person_ref_list) if event_ref_list: event_ref_list = upgrade_event_ref_list_16(self, event_ref_list) - if primary_name or alternate_names or address_list or \ - media_list or attribute_list or lds_seal_list or source_list or \ - person_ref_list or event_ref_list: + if(primary_name or alternate_names or address_list or media_list or + attribute_list or lds_seal_list or source_list or + person_ref_list or event_ref_list): new_person = (handle, gramps_id, gender, primary_name, alternate_names, death_ref_index, birth_ref_index, event_ref_list, family_list, @@ -479,100 +442,89 @@ def gramps_upgrade_16(self): new_citation_list, note_list, change, tag_list, private, person_ref_list) LOG.debug(" upgrade new_person %s" % [new_person]) - with BSDDBTxn(self.env, 
self.person_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_person) + self._commit_raw(new_person, PERSON_KEY) self.update() LOG.debug("%d persons upgraded with %d citations in %d seconds. " % - (len(list(self.person_map.keys())), + (self.get_number_of_people(), self.cmap_index - start_num_citations, time.time() - start_time)) - data_upgradeobject[key2data[PERSON_KEY]] = (len(list(self.person_map.keys())), - self.cmap_index - start_num_citations, - time.time() - start_time) + data_upgradeobject[key2data[PERSON_KEY]] = ( + self.get_number_of_people(), self.cmap_index - start_num_citations, + time.time() - start_time) # --------------------------------- # Modify Media # --------------------------------- start_num_citations = self.cmap_index start_time = time.time() - for media_handle in self.media_map.keys(): - media = self.media_map[media_handle] + for media_handle in self.get_media_handles(): + media = self.get_raw_media_data(media_handle) LOG.debug("upgrade media object %s" % media[4]) (handle, gramps_id, path, mime, desc, attribute_list, source_list, note_list, change, date, tag_list, private) = media new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) - new_attribute_list = upgrade_attribute_list_16( - self, attribute_list) + self, source_list) + new_attribute_list = upgrade_attribute_list_16(self, attribute_list) new_media = (handle, gramps_id, path, mime, desc, new_attribute_list, new_citation_list, note_list, change, date, tag_list, private) LOG.debug(" upgrade new_media %s" % [new_media]) - with BSDDBTxn(self.env, self.media_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_media) + self._commit_raw(new_media, MEDIA_KEY) self.update() LOG.debug("%d media objects upgraded with %d citations in %d seconds" % - (len(self.media_map.keys()), + (self.get_number_of_media(), self.cmap_index - start_num_citations, int(time.time() - 
start_time))) - data_upgradeobject[key2data[MEDIA_KEY]] = (len(list(self.media_map.keys())), - self.cmap_index - start_num_citations, - time.time() - start_time) + data_upgradeobject[key2data[MEDIA_KEY]] = ( + self.get_number_of_media(), self.cmap_index - start_num_citations, + time.time() - start_time) # --------------------------------- # Modify Places # --------------------------------- start_num_citations = self.cmap_index start_time = time.time() - for place_handle in self.place_map.keys(): - place = self.place_map[place_handle] + for place_handle in self.get_place_handles(): + place = self.get_raw_place_data(place_handle) LOG.debug("upgrade place %s" % place[2]) (handle, gramps_id, title, longi, lat, main_loc, alt_loc, urls, media_list, source_list, note_list, change, private) = place if source_list: new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) + self, source_list) else: new_citation_list = [] if media_list: - media_list = upgrade_media_list_16( - self, media_list) + media_list = upgrade_media_list_16(self, media_list) if source_list or media_list: new_place = (handle, gramps_id, title, longi, lat, main_loc, alt_loc, urls, media_list, new_citation_list, note_list, change, private) LOG.debug(" upgrade new_place %s" % [new_place]) - with BSDDBTxn(self.env, self.place_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_place) + self._commit_raw(new_place, PLACE_KEY) self.update() LOG.debug("%d places upgraded with %d citations in %d seconds. 
" % - (len(list(self.place_map.keys())), + (self.get_number_of_places(), self.cmap_index - start_num_citations, time.time() - start_time)) - data_upgradeobject[key2data[PLACE_KEY]] = (len(list(self.place_map.keys())), - self.cmap_index - start_num_citations, - time.time() - start_time) + data_upgradeobject[key2data[PLACE_KEY]] = ( + self.get_number_of_places(), self.cmap_index - start_num_citations, + time.time() - start_time) # --------------------------------- # Modify Families # --------------------------------- start_num_citations = self.cmap_index start_time = time.time() - for family_handle in self.family_map.keys(): - family = self.family_map[family_handle] + for family_handle in self.get_family_handles(): + family = self.get_raw_family_data(family_handle) LOG.debug("upgrade family (gramps_id) %s" % family[1]) (handle, gramps_id, father_handle, mother_handle, child_ref_list, the_type, event_ref_list, media_list, @@ -580,166 +532,144 @@ def gramps_upgrade_16(self): change, tag_list, private) = family if source_list: new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) + self, source_list) else: new_citation_list = [] if child_ref_list: - child_ref_list = upgrade_child_ref_list_16( - self, child_ref_list) + child_ref_list = upgrade_child_ref_list_16(self, child_ref_list) if lds_seal_list: - lds_seal_list = upgrade_lds_seal_list_16( - self, lds_seal_list) + lds_seal_list = upgrade_lds_seal_list_16(self, lds_seal_list) if media_list: - media_list = upgrade_media_list_16( - self, media_list) + media_list = upgrade_media_list_16(self, media_list) if attribute_list: - attribute_list = upgrade_attribute_list_16( - self, attribute_list) + attribute_list = upgrade_attribute_list_16(self, attribute_list) if event_ref_list: event_ref_list = upgrade_event_ref_list_16(self, event_ref_list) - if source_list or media_list or child_ref_list or \ - attribute_list or lds_seal_list or event_ref_list: + if(source_list or media_list or child_ref_list 
or + attribute_list or lds_seal_list or event_ref_list): new_family = (handle, gramps_id, father_handle, mother_handle, child_ref_list, the_type, event_ref_list, media_list, attribute_list, lds_seal_list, new_citation_list, note_list, change, tag_list, private) LOG.debug(" upgrade new_family %s" % [new_family]) - with BSDDBTxn(self.env, self.family_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_family) + self._commit_raw(new_family, FAMILY_KEY) self.update() LOG.debug("%d families upgraded with %d citations in %d seconds. " % - (len(list(self.family_map.keys())), + (self.get_number_of_families(), self.cmap_index - start_num_citations, time.time() - start_time)) - data_upgradeobject[key2data[FAMILY_KEY]] = (len(list(self.family_map.keys())), - self.cmap_index - start_num_citations, - time.time() - start_time) + data_upgradeobject[key2data[FAMILY_KEY]] = ( + self.get_number_of_families(), self.cmap_index - start_num_citations, + time.time() - start_time) # --------------------------------- # Modify Events # --------------------------------- start_num_citations = self.cmap_index start_time = time.time() - for event_handle in self.event_map.keys(): - event = self.event_map[event_handle] + for event_handle in self.get_event_handles(): + event = self.get_raw_event_data(event_handle) LOG.debug("upgrade event %s" % event[4]) (handle, gramps_id, the_type, date, description, place, source_list, note_list, media_list, attribute_list, change, private) = event if source_list: new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) + self, source_list) else: new_citation_list = [] if attribute_list: - attribute_list = upgrade_attribute_list_16( - self, attribute_list) + attribute_list = upgrade_attribute_list_16(self, attribute_list) if media_list: - media_list = upgrade_media_list_16( - self, media_list) + media_list = upgrade_media_list_16(self, media_list) if source_list or attribute_list or 
media_list: new_event = (handle, gramps_id, the_type, date, description, place, new_citation_list, note_list, media_list, attribute_list, change, private) LOG.debug(" upgrade new_event %s" % [new_event]) - with BSDDBTxn(self.env, self.event_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_event) + self._commit_raw(new_event, EVENT_KEY) self.update() LOG.debug("%d events upgraded with %d citations in %d seconds. " % - (len(self.event_map.keys()), + (self.get_number_of_events(), self.cmap_index - start_num_citations, time.time() - start_time)) - data_upgradeobject[key2data[EVENT_KEY]] = (len(list(self.event_map.keys())), - self.cmap_index - start_num_citations, - time.time() - start_time) + data_upgradeobject[key2data[EVENT_KEY]] = ( + self.get_number_of_events(), self.cmap_index - start_num_citations, + time.time() - start_time) # --------------------------------- # Modify Repositories # --------------------------------- start_num_citations = self.cmap_index start_time = time.time() - for repository_handle in self.repository_map.keys(): - repository = self.repository_map[repository_handle] + for repository_handle in self.get_repository_handles(): + repository = self.get_raw_repository_data(repository_handle) LOG.debug("upgrade repository %s" % repository[3]) (handle, gramps_id, the_type, name, note_list, address_list, urls, change, private) = repository if address_list: - address_list = upgrade_address_list_16( - self, address_list) + address_list = upgrade_address_list_16(self, address_list) if address_list: new_repository = (handle, gramps_id, the_type, name, note_list, address_list, urls, change, private) LOG.debug(" upgrade new_repository %s" % [new_repository]) - with BSDDBTxn(self.env, self.repository_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_repository) + self._commit_raw(new_repository, REPOSITORY_KEY) self.update() LOG.debug("%d repositories upgraded 
with %d citations in %d seconds. " % - (len(list(self.repository_map.keys())), + (self.get_number_of_repositories(), self.cmap_index - start_num_citations, time.time() - start_time)) - data_upgradeobject[key2data[REPOSITORY_KEY]] = (len(list(self.repository_map.keys())), - self.cmap_index - start_num_citations, - time.time() - start_time) + data_upgradeobject[key2data[REPOSITORY_KEY]] = ( + self.get_number_of_repositories(), + self.cmap_index - start_num_citations, + time.time() - start_time) # --------------------------------- # Modify Source # --------------------------------- start_num_citations = self.cmap_index start_time = time.time() - for source_handle in self.source_map.keys(): - source = self.source_map[source_handle] + for source_handle in self.get_source_handles(): + source = self.get_raw_source_data(source_handle) LOG.debug("upgrade source %s" % source[2]) (handle, gramps_id, title, author, pubinfo, note_list, media_list, abbrev, change, datamap, reporef_list, private) = source if media_list: - media_list = upgrade_media_list_16( - self, media_list) + media_list = upgrade_media_list_16(self, media_list) new_source = (handle, gramps_id, title, author, pubinfo, note_list, media_list, abbrev, change, datamap, reporef_list, private) LOG.debug(" upgrade new_source %s" % [new_source]) - with BSDDBTxn(self.env, self.source_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_source) + self._commit_raw(new_source, SOURCE_KEY) self.update() LOG.debug("%d sources upgraded with %d citations in %d seconds" % - (len(self.source_map.keys()), + (self.get_number_of_sources(), self.cmap_index - start_num_citations, int(time.time() - start_time))) - data_upgradeobject[key2data[SOURCE_KEY]] = (len(self.source_map.keys()), - self.cmap_index - start_num_citations, - time.time() - start_time) + data_upgradeobject[key2data[SOURCE_KEY]] = ( + self.get_number_of_sources(), self.cmap_index - start_num_citations, + time.time() - 
start_time) - # --------------------------------- +# --------------------------------- +# Example database from repository took: +# 3403 events upgraded with 8 citations in 23 seconds. Backlinks took 1071 secs +# actually 4 of these citations were from: +# Media upgrade 4 citations upgraded in 4 seconds +# by only doing the backlinks when there might be something to do, +# improved to: +# 3403 events upgraded with 8 citations in 19 seconds. Backlinks took 1348 secs +# further improved by skipping debug logging: +# 3403 events upgraded with 8 citations in 2 seconds. Backlinks took 167 secs - - # --------------------------------- - # Example database from repository took: - # 3403 events upgraded with 8 citations in 23 seconds. Backlinks took 1071 seconds - # actually 4 of these citations were from: - # Media upgrade 4 citations upgraded in 4 seconds - # by only doing the backlinks when there might be something to do, - # improved to: - # 3403 events upgraded with 8 citations in 19 seconds. Backlinks took 1348 seconds - # further improved by skipping debug logging: - # 3403 events upgraded with 8 citations in 2 seconds. 
Backlinks took 167 seconds - - #Number of new objects upgraded: +#Number of new objects upgraded: # 2090 People upgraded with 2092 citations in 2148 secs # 734 Families upgraded with 735 citations in 768 secs # 3403 Events upgraded with 4 citations in 212 secs @@ -778,7 +708,7 @@ def gramps_upgrade_16(self): # 7 Media Objects upgraded with 4 citations in 2 secs # 852 Places upgraded with 0 citations in 1 secs -# without incorrect nestetd tranaction structure: +# without incorrect nested tranaction structure: #Number of new objects upgraded: # 73 People upgraded with 76 citations in 0 secs # 35 Families upgraded with 36 citations in 0 secs @@ -786,90 +716,89 @@ def gramps_upgrade_16(self): # 7 Media Objects upgraded with 4 citations in 0 secs # 852 Places upgraded with 0 citations in 0 secs -#[[(73, 76, 0.12430405616760254), (35, 36, 0.042523860931396484), (3403, 4, 0.52303886413574219), (7, 4, 0.058229923248291016), (852, 0, 0.14816904067993164)]] - - - - - - + self._txn_commit() # Bump up database version. Separate transaction to save metadata. 
- with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'version', 16) + self._set_metadata('version', 16) LOG.debug([data_upgradeobject]) txt = _("Number of new objects upgraded:\n") for key in keyorder: try: txt += key2string[key] % { - 'n1' : data_upgradeobject[key2data[key]][0], - 'n2' : data_upgradeobject[key2data[key]][1], - 'n3' : data_upgradeobject[key2data[key]][2] } + 'n1' : data_upgradeobject[key2data[key]][0], + 'n2' : data_upgradeobject[key2data[key]][1], + 'n3' : data_upgradeobject[key2data[key]][2]} except: txt += key2string[key] txt += _("\n\nYou may want to run\n" "Tools -> Family Tree Processing -> Merge\n" "in order to merge citations that contain similar\n" "information") - InfoDialog(_('Upgrade Statistics'), txt, monospaced=True) # TODO no-parent + InfoDialog(_('Upgrade Statistics'), txt, monospaced=True) # TODO no-parent + def upgrade_media_list_16(self, media_list): new_media_list = [] for media in media_list: (privacy, source_list, note_list, attribute_list, ref, rect) = media new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) - new_attribute_list = upgrade_attribute_list_16( - self, attribute_list) + self, source_list) + new_attribute_list = upgrade_attribute_list_16(self, attribute_list) new_media = (privacy, new_citation_list, note_list, new_attribute_list, ref, rect) new_media_list.append((new_media)) return new_media_list + def upgrade_attribute_list_16(self, attribute_list): new_attribute_list = [] for attribute in attribute_list: (privacy, source_list, note_list, the_type, value) = attribute new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) + self, source_list) new_attribute = (privacy, new_citation_list, note_list, the_type, value) new_attribute_list.append((new_attribute)) return new_attribute_list + def upgrade_child_ref_list_16(self, child_ref_list): new_child_ref_list = [] for child_ref in child_ref_list: (privacy, source_list, note_list, ref, frel, mrel) = 
child_ref new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) - new_child_ref = (privacy, new_citation_list, note_list, ref, frel, mrel) + self, source_list) + new_child_ref = (privacy, new_citation_list, note_list, ref, + frel, mrel) new_child_ref_list.append((new_child_ref)) return new_child_ref_list + def upgrade_lds_seal_list_16(self, lds_seal_list): new_lds_seal_list = [] for lds_seal in lds_seal_list: - (source_list, note_list, date, type, place, + (source_list, note_list, date, type_, place, famc, temple, status, private) = lds_seal new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) - new_lds_seal = (new_citation_list, note_list, date, type, place, + self, source_list) + new_lds_seal = (new_citation_list, note_list, date, type_, place, famc, temple, status, private) new_lds_seal_list.append((new_lds_seal)) return new_lds_seal_list + def upgrade_address_list_16(self, address_list): new_address_list = [] for address in address_list: (privacy, source_list, note_list, date, location) = address new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) + self, source_list) new_address = (privacy, new_citation_list, note_list, date, location) new_address_list.append((new_address)) return new_address_list + def upgrade_name_list_16(self, name_list): new_name_list = [] for name in name_list: @@ -877,37 +806,40 @@ def upgrade_name_list_16(self, name_list): new_name_list.append((new_name)) return new_name_list + def upgrade_name_16(self, name): (privacy, source_list, note, date, first_name, surname_list, suffix, title, name_type, group_as, sort_as, display_as, call, nick, famnick) = name new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) + self, source_list) new_name = (privacy, new_citation_list, note, date, first_name, surname_list, suffix, title, name_type, group_as, sort_as, display_as, call, nick, famnick) return new_name + def 
upgrade_person_ref_list_16(self, person_ref_list): new_person_ref_list = [] for person_ref in person_ref_list: (privacy, source_list, note_list, ref, rel) = person_ref new_citation_list = convert_source_list_to_citation_list_16( - self, source_list) + self, source_list) new_person_ref = (privacy, new_citation_list, note_list, ref, rel) new_person_ref_list.append((new_person_ref)) return new_person_ref_list + def upgrade_event_ref_list_16(self, event_ref_list): new_event_ref_list = [] for event_ref in event_ref_list: (privacy, note_list, attribute_list, ref, role) = event_ref - new_attribute_list = upgrade_attribute_list_16( - self, attribute_list) + new_attribute_list = upgrade_attribute_list_16(self, attribute_list) new_event_ref = (privacy, note_list, new_attribute_list, ref, role) new_event_ref_list.append((new_event_ref)) return new_event_ref_list + def convert_source_list_to_citation_list_16(self, source_list): citation_list = [] for source in source_list: @@ -921,13 +853,11 @@ def convert_source_list_to_citation_list_16(self, source_list): date, page, confidence, ref, note_list, new_media_list, new_data_map, new_change, private) citation_list.append((new_handle)) - with BSDDBTxn(self.env, self.citation_map) as txn: - if isinstance(new_handle, str): - new_handle = new_handle.encode('utf-8') - txn.put(new_handle, new_citation) + self._commit_raw(new_citation, CITATION_KEY) self.cmap_index += 1 return citation_list + def gramps_upgrade_15(self): """ Upgrade database from version 14 to 15. 
This upgrade adds: @@ -936,41 +866,42 @@ def gramps_upgrade_15(self): * surname list * remove marker """ - length = (len(self.note_map) + len(self.person_map) + - len(self.event_map) + len(self.family_map) + - len(self.repository_map) + len(self.media_map) + - len(self.place_map) + len(self.source_map)) + 10 + length = (self.get_number_of_notes() + self.get_number_of_people() + + self.get_number_of_events() + self.get_number_of_families() + + self.get_number_of_repositories() + self.get_number_of_media() + + self.get_number_of_places() + self.get_number_of_sources()) + 10 self.set_total(length) + self._txn_begin() self.tags = {} # --------------------------------- # Modify Person # --------------------------------- # Replace the old marker field with the new tag list field. - for handle in self.person_map.keys(): - person = self.person_map[handle] + for handle in self.get_person_handles(): + person = self.get_raw_person_data(handle) - (junk_handle, # 0 - gramps_id, # 1 - gender, # 2 - primary_name, # 3 - alternate_names, # 4 - death_ref_index, # 5 - birth_ref_index, # 6 - event_ref_list, # 7 - family_list, # 8 - parent_family_list, # 9 - media_list, # 10 - address_list, # 11 - attribute_list, # 12 - urls, # 13 - ord_list, # 14 - psource_list, # 15 - pnote_list, # 16 - change, # 17 - marker, # 18 - pprivate, # 19 - person_ref_list, # 20 + (junk_handle, # 0 + gramps_id, # 1 + gender, # 2 + primary_name, # 3 + alternate_names, # 4 + death_ref_index, # 5 + birth_ref_index, # 6 + event_ref_list, # 7 + family_list, # 8 + parent_family_list, # 9 + media_list, # 10 + address_list, # 11 + attribute_list, # 12 + urls, # 13 + ord_list, # 14 + psource_list, # 15 + pnote_list, # 16 + change, # 17 + marker, # 18 + pprivate, # 19 + person_ref_list, # 20 ) = person tag_handle = convert_marker(self, marker) @@ -981,47 +912,38 @@ def gramps_upgrade_15(self): address_list = list(map(convert_address, address_list)) new_primary_name = convert_name_15(primary_name) new_alternate_names = 
list(map(convert_name_15, alternate_names)) - new_person = (junk_handle, # 0 - gramps_id, # 1 - gender, # 2 - new_primary_name, # 3 - new_alternate_names,# 4 - death_ref_index, # 5 - birth_ref_index, # 6 - event_ref_list, # 7 - family_list, # 8 - parent_family_list, # 9 - media_list, # 10 - address_list, # 11 - attribute_list, # 12 - urls, # 13 - ord_list, # 14 - psource_list, # 15 - pnote_list, # 16 - change, # 17 - tags, # 18 - pprivate, # 19 - person_ref_list # 20 + new_person = (junk_handle, # 0 + gramps_id, # 1 + gender, # 2 + new_primary_name, # 3 + new_alternate_names, # 4 + death_ref_index, # 5 + birth_ref_index, # 6 + event_ref_list, # 7 + family_list, # 8 + parent_family_list, # 9 + media_list, # 10 + address_list, # 11 + attribute_list, # 12 + urls, # 13 + ord_list, # 14 + psource_list, # 15 + pnote_list, # 16 + change, # 17 + tags, # 18 + pprivate, # 19 + person_ref_list # 20 ) - with BSDDBTxn(self.env, self.person_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_person) + self._commit_raw(new_person, PERSON_KEY) self.update() - #surname is now different, remove secondary index with names - _db = db.DB(self.env) - try: - _db.remove(_mkname(self.full_name, SURNAMES), SURNAMES) - except db.DBNoSuchFileError: - pass # --------------------------------- # Modify Family # --------------------------------- # Replace the old marker field with the new tag list field. 
- for handle in self.family_map.keys(): - family = self.family_map[handle] + for handle in self.get_family_handles(): + family = self.get_raw_family_data(handle) new_family = list(family) tag_handle = convert_marker(self, new_family[13]) if tag_handle: @@ -1029,18 +951,15 @@ def gramps_upgrade_15(self): else: new_family[13] = [] new_family = tuple(new_family) - with BSDDBTxn(self.env, self.family_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_family) + self._commit_raw(new_family, FAMILY_KEY) self.update() # --------------------------------- # Modify Note # --------------------------------- # Replace the old marker field with the new tag list field. - for handle in self.note_map.keys(): - note = self.note_map[handle] + for handle in self.get_note_handles(): + note = self.get_raw_note_data(handle) new_note = list(note) tag_handle = convert_marker(self, new_note[6]) if tag_handle: @@ -1048,95 +967,78 @@ def gramps_upgrade_15(self): else: new_note[6] = [] new_note = tuple(new_note) - with BSDDBTxn(self.env, self.note_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_note) + self._commit_raw(new_note, NOTE_KEY) self.update() # --------------------------------- # Modify Media object # --------------------------------- # Replace the old marker field with the new tag list field. - for handle in self.media_map.keys(): - media = self.media_map[handle] + for handle in self.get_media_handles(): + media = self.get_raw_media_data(handle) new_media = list(media) new_media[10] = [] new_media = tuple(new_media) - with BSDDBTxn(self.env, self.media_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_media) + self._commit_raw(new_media, MEDIA_KEY) self.update() # --------------------------------- # Modify Event # --------------------------------- # Replace the old marker field with the new tag list field. 
- for handle in self.event_map.keys(): - event = self.event_map[handle] + for handle in self.get_event_handles(): + event = self.get_raw_event_data(handle) new_event = list(event) new_event = new_event[:11] + new_event[12:] #new_event[11] = [] new_event = tuple(new_event) - with BSDDBTxn(self.env, self.event_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_event) + self._commit_raw(new_event, EVENT_KEY) self.update() # --------------------------------- # Modify Place # --------------------------------- # Remove the old marker field, set new locality. - for handle in self.place_map.keys(): - place = self.place_map[handle] + for handle in self.get_place_handles(): + place = self.get_raw_place_data(handle) new_place = list(place) if new_place[5] is not None: new_place[5] = convert_location(new_place[5]) new_place[6] = list(map(convert_location, new_place[6])) new_place = new_place[:12] + new_place[13:] new_place = tuple(new_place) - with BSDDBTxn(self.env, self.place_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_place) + self._commit_raw(new_place, PLACE_KEY) self.update() # --------------------------------- # Modify Source # --------------------------------- # Remove the old marker field. - for handle in self.source_map.keys(): - source = self.source_map[handle] + for handle in self.get_source_handles(): + source = self.get_raw_source_data(handle) new_source = list(source) new_source = new_source[:11] + new_source[12:] new_source = tuple(new_source) - with BSDDBTxn(self.env, self.source_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_source) + self._commit_raw(new_source, SOURCE_KEY) self.update() # --------------------------------- # Modify Repository # --------------------------------- # Remove the old marker field, set new locality. 
- for handle in self.repository_map.keys(): - repository = self.repository_map[handle] + for handle in self.get_repository_handles(): + repository = self.get_raw_repository_data(handle) new_repository = list(repository) new_repository = new_repository[:8] + new_repository[9:] new_repository[5] = list(map(convert_address, new_repository[5])) new_repository = tuple(new_repository) - with BSDDBTxn(self.env, self.repository_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_repository) + self._commit_raw(new_repository, REPOSITORY_KEY) self.update() + self._txn_commit() # Bump up database version. Separate transaction to save metadata. - with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'version', 15) + self._set_metadata('version', 15) + def convert_marker(self, marker_field): """Convert a marker into a tag.""" @@ -1152,27 +1054,28 @@ def convert_marker(self, marker_field): tag.set_change_time(time.time()) tag.set_name(tag_name) tag.set_priority(len(self.tags)) - with BSDDBTxn(self.env, self.tag_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, tag.serialize()) + self._commit_raw(tag.serialize(), TAG_KEY) self.tags[tag_name] = handle return self.tags[tag_name] else: return None + def convert_locbase(loc): """Convert location base to include an empty locality field.""" return tuple([loc[0], ''] + list(loc[1:])) + def convert_location(loc): """Convert a location into the new format.""" return (convert_locbase(loc[0]), loc[1]) + def convert_address(addr): """Convert an address into the new format.""" return (addr[0], addr[1], addr[2], addr[3], convert_locbase(addr[4])) + def convert_name_15(name): (privacy, source_list, note_list, date, first_name, surname, suffix, title, @@ -1189,7 +1092,8 @@ def convert_name_15(name): else: #a patronymic, if no surname or equal as patronymic, a single surname if (surname.strip() == "") or (surname == patronymic and prefix == ""): - 
surname_list = [(patronymic, prefix, True, patorigintype, connector)] + surname_list = [ + (patronymic, prefix, True, patorigintype, connector)] else: #two surnames, first patronymic, then surname which is primary surname_list = [(patronymic, "", False, patorigintype, ""), @@ -1197,41 +1101,40 @@ def convert_name_15(name): #return new value, add two empty strings for nick and family nick return (privacy, source_list, note_list, date, - first_name, surname_list, suffix, title, name_type, - group_as, sort_as, display_as, call, "", "") + first_name, surname_list, suffix, title, name_type, + group_as, sort_as, display_as, call, "", "") + def gramps_upgrade_14(self): """Upgrade database from version 13 to 14.""" # This upgrade modifies notes and dates - length = (len(self.note_map) + len(self.person_map) + - len(self.event_map) + len(self.family_map) + - len(self.repository_map) + len(self.media_map) + - len(self.place_map) + len(self.source_map)) + length = (self.get_number_of_notes() + self.get_number_of_people() + + self.get_number_of_events() + self.get_number_of_families() + + self.get_number_of_repositories() + self.get_number_of_media() + + self.get_number_of_places() + self.get_number_of_sources()) self.set_total(length) + self._txn_begin() # --------------------------------- # Modify Notes # --------------------------------- # replace clear text with StyledText in Notes - for handle in self.note_map.keys(): - note = self.note_map[handle] - (junk_handle, gramps_id, text, format, note_type, + for handle in self.get_note_handles(): + note = self.get_raw_note_data(handle) + (junk_handle, gramps_id, text, format_, note_type, change, marker, private) = note styled_text = (text, []) - new_note = (handle, gramps_id, styled_text, format, note_type, + new_note = (handle, gramps_id, styled_text, format_, note_type, change, marker, private) - with BSDDBTxn(self.env, self.note_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, 
new_note) + self._commit_raw(new_note, NOTE_KEY) self.update() # --------------------------------- # Modify Event # --------------------------------- # update dates with newyear - for handle in self.event_map.keys(): - event = self.event_map[handle] + for handle in self.get_event_handles(): + event = self.get_raw_event_data(handle) (junk_handle, gramps_id, the_type, date, description, place, source_list, note_list, media_list, attribute_list, change, marker, private) = event @@ -1239,42 +1142,39 @@ def gramps_upgrade_14(self): new_source_list = new_source_list_14(source_list) new_media_list = new_media_list_14(media_list) new_attribute_list = new_attribute_list_14(attribute_list) - new_event = (junk_handle, gramps_id, the_type, new_date, - description, place, new_source_list, note_list, - new_media_list, new_attribute_list, change,marker,private) - with BSDDBTxn(self.env, self.event_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_event) + new_event = (junk_handle, gramps_id, the_type, new_date, description, + place, new_source_list, note_list, new_media_list, + new_attribute_list, change, marker, private) + self._commit_raw(new_event, EVENT_KEY) self.update() # --------------------------------- # Modify Person # --------------------------------- # update dates with newyear - for handle in self.person_map.keys(): - person = self.person_map[handle] - (junk_handle, # 0 - gramps_id, # 1 - gender, # 2 - primary_name, # 3 - alternate_names, # 4 - death_ref_index, # 5 - birth_ref_index, # 6 - event_ref_list, # 7 - family_list, # 8 - parent_family_list, # 9 - media_list, # 10 - address_list, # 11 - attribute_list, # 12 - urls, # 13 - lds_ord_list, # 14 - psource_list, # 15 - pnote_list, # 16 - change, # 17 - marker, # 18 - pprivate, # 19 - person_ref_list, # 20 + for handle in self.get_person_handles(): + person = self.get_raw_person_data(handle) + (junk_handle, # 0 + gramps_id, # 1 + gender, # 2 + primary_name, # 3 + 
alternate_names, # 4 + death_ref_index, # 5 + birth_ref_index, # 6 + event_ref_list, # 7 + family_list, # 8 + parent_family_list, # 9 + media_list, # 10 + address_list, # 11 + attribute_list, # 12 + urls, # 13 + lds_ord_list, # 14 + psource_list, # 15 + pnote_list, # 16 + change, # 17 + marker, # 18 + pprivate, # 19 + person_ref_list, # 20 ) = person new_address_list = [] @@ -1286,12 +1186,12 @@ def gramps_upgrade_14(self): new_date, location)) new_ord_list = [] for ldsord in lds_ord_list: - (lsource_list, lnote_list, date, type, place, + (lsource_list, lnote_list, date, type_, place, famc, temple, status, lprivate) = ldsord new_date = convert_date_14(date) new_lsource_list = new_source_list_14(lsource_list) - new_ord_list.append( (new_lsource_list, lnote_list, new_date, type, - place, famc, temple, status, lprivate)) + new_ord_list.append((new_lsource_list, lnote_list, new_date, type_, + place, famc, temple, status, lprivate)) new_primary_name = convert_name_14(primary_name) @@ -1303,41 +1203,38 @@ def gramps_upgrade_14(self): new_attribute_list = new_attribute_list_14(attribute_list) new_person_ref_list = new_person_ref_list_14(person_ref_list) - new_person = (junk_handle, # 0 - gramps_id, # 1 - gender, # 2 - new_primary_name, # 3 - new_alternate_names, # 4 - death_ref_index, # 5 - birth_ref_index, # 6 - event_ref_list, # 7 - family_list, # 8 - parent_family_list, # 9 - new_media_list, # 10 - new_address_list, # 11 - new_attribute_list, # 12 - urls, # 13 - new_ord_list, # 14 - new_psource_list, # 15 - pnote_list, # 16 - change, # 17 - marker, # 18 - pprivate, # 19 - new_person_ref_list, # 20 + new_person = (junk_handle, # 0 + gramps_id, # 1 + gender, # 2 + new_primary_name, # 3 + new_alternate_names, # 4 + death_ref_index, # 5 + birth_ref_index, # 6 + event_ref_list, # 7 + family_list, # 8 + parent_family_list, # 9 + new_media_list, # 10 + new_address_list, # 11 + new_attribute_list, # 12 + urls, # 13 + new_ord_list, # 14 + new_psource_list, # 15 + pnote_list, # 
16 + change, # 17 + marker, # 18 + pprivate, # 19 + new_person_ref_list, # 20 ) - with BSDDBTxn(self.env, self.person_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_person) + self._commit_raw(new_person, PERSON_KEY) self.update() # --------------------------------- # Modify Family # --------------------------------- # update dates with newyear - for handle in self.family_map.keys(): - family = self.family_map[handle] + for handle in self.get_family_handles(): + family = self.get_raw_family_data(handle) (junk_handle, gramps_id, father_handle, mother_handle, child_ref_list, the_type, event_ref_list, media_list, attribute_list, lds_seal_list, source_list, note_list, @@ -1348,30 +1245,28 @@ def gramps_upgrade_14(self): new_attribute_list = new_attribute_list_14(attribute_list) new_seal_list = [] for ldsord in lds_seal_list: - (lsource_list, lnote_list, date, type, place, + (lsource_list, lnote_list, date, type_, place, famc, temple, status, lprivate) = ldsord new_date = convert_date_14(date) new_lsource_list = new_source_list_14(lsource_list) - new_seal_list.append( (new_lsource_list, lnote_list, new_date, type, - place, famc, temple, status, lprivate)) + new_seal_list.append((new_lsource_list, lnote_list, new_date, + type_, place, famc, temple, status, + lprivate)) new_family = (junk_handle, gramps_id, father_handle, mother_handle, - new_child_ref_list, the_type, event_ref_list, new_media_list, - new_attribute_list, new_seal_list, new_source_list, note_list, - change, marker, private) + new_child_ref_list, the_type, event_ref_list, + new_media_list, new_attribute_list, new_seal_list, + new_source_list, note_list, change, marker, private) - with BSDDBTxn(self.env, self.family_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_family) + self._commit_raw(new_family, FAMILY_KEY) self.update() # --------------------------------- # Modify Repository # 
--------------------------------- # update dates with newyear - for handle in self.repository_map.keys(): - repository = self.repository_map[handle] + for handle in self.get_repository_handles(): + repository = self.get_raw_repository_data(handle) # address (junk_handle, gramps_id, the_type, name, note_list, address_list, urls, change, marker, private) = repository @@ -1387,17 +1282,14 @@ def gramps_upgrade_14(self): new_repository = (junk_handle, gramps_id, the_type, name, note_list, new_address_list, urls, change, marker, private) - with BSDDBTxn(self.env, self.repository_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_repository) + self._commit_raw(new_repository, REPOSITORY_KEY) self.update() # --------------------------------- # Modify Media # --------------------------------- - for media_handle in self.media_map.keys(): - media = self.media_map[media_handle] + for media_handle in self.get_media_handles(): + media = self.get_raw_media_data(media_handle) (handle, gramps_id, path, mime, desc, attribute_list, source_list, note_list, change, date, marker, private) = media @@ -1407,17 +1299,14 @@ def gramps_upgrade_14(self): attribute_list, new_source_list, note_list, change, new_date, marker, private) - with BSDDBTxn(self.env, self.media_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_media) + self._commit_raw(new_media, MEDIA_KEY) self.update() # --------------------------------- # Modify Place # --------------------------------- - for place_handle in self.place_map.keys(): - place = self.place_map[place_handle] + for place_handle in self.get_place_handles(): + place = self.get_raw_place_data(place_handle) (handle, gramps_id, title, longi, lat, main_loc, alt_loc, urls, media_list, source_list, note_list, change, marker, private) = place @@ -1427,17 +1316,14 @@ def gramps_upgrade_14(self): main_loc, alt_loc, urls, new_media_list, new_source_list, note_list, 
change, marker, private) - with BSDDBTxn(self.env, self.place_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_place) + self._commit_raw(new_place, PLACE_KEY) self.update() # --------------------------------- # Modify Source # --------------------------------- - for source_handle in self.source_map.keys(): - source = self.source_map[source_handle] + for source_handle in self.get_source_handles(): + source = self.get_raw_source_data(source_handle) (handle, gramps_id, title, author, pubinfo, note_list, media_list, abbrev, change, datamap, reporef_list, @@ -1448,60 +1334,68 @@ def gramps_upgrade_14(self): abbrev, change, datamap, reporef_list, marker, private) - with BSDDBTxn(self.env, self.source_map) as txn: - if isinstance(handle, str): - handle = handle.encode('utf-8') - txn.put(handle, new_source) + self._commit_raw(new_source, SOURCE_KEY) self.update() + self._txn_commit() # Bump up database version. Separate transaction to save metadata. 
- with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'version', 14) + self._set_metadata('version', 14) + def new_source_list_14(source_list): new_source_list = [] for source in source_list: (date, private, note_list, confidence, ref, page) = source new_date = convert_date_14(date) - new_source_list.append((new_date, private, note_list, confidence, ref, page)) + new_source_list.append((new_date, private, note_list, confidence, ref, + page)) return new_source_list + def new_attribute_list_14(attribute_list): new_attribute_list = [] for attribute in attribute_list: (private, asource_list, note_list, the_type, value) = attribute new_asource_list = new_source_list_14(asource_list) - new_attribute_list.append((private, new_asource_list, note_list, the_type, value)) + new_attribute_list.append((private, new_asource_list, note_list, + the_type, value)) return new_attribute_list + def new_media_list_14(media_list): # --------------------------------- # Event Media list # --------------------------------- new_media_list = [] for media in media_list: - (private, source_list, note_list,attribute_list,ref,role) = media + (private, source_list, note_list, attribute_list, ref, role) = media new_source_list = new_source_list_14(source_list) new_attribute_list = new_attribute_list_14(attribute_list) - new_media_list.append((private, new_source_list, note_list, new_attribute_list, ref, role)) + new_media_list.append((private, new_source_list, note_list, + new_attribute_list, ref, role)) return new_media_list + def new_person_ref_list_14(person_ref_list): new_person_ref_list = [] for person_ref in person_ref_list: (private, source_list, note_list, ref, rel) = person_ref new_source_list = new_source_list_14(source_list) - new_person_ref_list.append((private, new_source_list, note_list, ref, rel)) + new_person_ref_list.append((private, new_source_list, note_list, ref, + rel)) return new_person_ref_list + def new_child_ref_list_14(child_ref_list): new_child_ref_list = [] for 
data in child_ref_list: (private, source_list, note_list, ref, frel, mrel) = data new_source_list = new_source_list_14(source_list) - new_child_ref_list.append((private, new_source_list, note_list, ref, frel, mrel)) + new_child_ref_list.append((private, new_source_list, note_list, ref, + frel, mrel)) return new_child_ref_list + def convert_date_14(date): if date: (calendar, modifier, quality, dateval, text, sortval) = date @@ -1509,6 +1403,7 @@ def convert_date_14(date): else: return None + def convert_name_14(name): (privacy, source_list, note_list, date, first_name, surname, suffix, title, @@ -1521,3 +1416,32 @@ def convert_name_14(name): name_type, prefix, patronymic, group_as, sort_as, display_as, call) + +def make_zip_backup(dirname): + """ + This backs up the db files so an upgrade can be (manually) undone. + """ + LOG.debug("Make backup prior to schema upgrade") + import zipfile + # In Windows reserved characters is "<>:"/\|?*" + reserved_char = r':,<>"/\|?* ' + replace_char = "-__________" + filepath = os.path.join(dirname, NAME_FILE) + with open(filepath, "r", encoding='utf8') as name_file: + title = name_file.readline().strip() + trans = title.maketrans(reserved_char, replace_char) + title = title.translate(trans) + + if not os.access(dirname, os.W_OK): + LOG.warning("Can't write technical DB backup for %s", title) + return + (grampsdb_path, db_code) = os.path.split(dirname) + dotgramps_path = os.path.dirname(grampsdb_path) + zipname = title + time.strftime("_%Y-%m-%d_%H-%M-%S") + ".zip" + zippath = os.path.join(dotgramps_path, zipname) + with zipfile.ZipFile(zippath, 'w') as myzip: + for filename in os.listdir(dirname): + pathname = os.path.join(dirname, filename) + myzip.write(pathname, os.path.join(db_code, filename)) + LOG.warning("If upgrade and loading the Family Tree works, you can " + "delete the zip file at %s", zippath) diff --git a/gramps/gui/dbloader.py b/gramps/gui/dbloader.py index 3525cc38f..4f7f6e755 100644 --- a/gramps/gui/dbloader.py +++ 
b/gramps/gui/dbloader.py @@ -54,18 +54,14 @@ from gi.repository import GObject from gramps.gen.const import GRAMPS_LOCALE as glocale from gramps.gen.db.dbconst import DBBACKEND from gramps.gen.db.utils import make_database +from gramps.gen.db.upgrade import make_zip_backup _ = glocale.translation.gettext from gramps.cli.grampscli import CLIDbLoader from gramps.gen.config import config from gramps.gen.db.exceptions import (DbUpgradeRequiredError, - BsddbDowngradeError, DbVersionError, DbPythonError, - DbEnvironmentError, - BsddbUpgradeRequiredError, - BsddbDowngradeRequiredError, - PythonUpgradeRequiredError, - PythonDowngradeError, + DbSupportedError, DbConnectionError) from .pluginmanager import GuiPluginManager from .dialog import (DBErrorDialog, ErrorDialog, QuestionDialog2, @@ -163,130 +159,70 @@ class DbLoader(CLIDbLoader): if not os.access(filename, os.W_OK): mode = "r" self._warn(_('Read only database'), - _('You do not have write access ' - 'to the selected file.')) + _('You do not have write access ' + 'to the selected file.')) else: mode = "w" else: mode = 'w' - dbid_path = os.path.join(filename, DBBACKEND) - if os.path.isfile(dbid_path): - with open(dbid_path) as fp: - dbid = fp.read().strip() - else: - dbid = "bsddb" - - db = make_database(dbid) - db.disable_signals() self.dbstate.no_database() - if db.requires_login() and username is None: - login = GrampsLoginDialog(self.uistate) - credentials = login.run() - if credentials is None: - return - username, password = credentials - - self._begin_progress() + self.uistate.progress.show() + self.uistate.pulse_progressbar(0) force_schema_upgrade = False - force_bsddb_upgrade = False - force_bsddb_downgrade = False - force_python_upgrade = False try: while True: + dbid_path = os.path.join(filename, DBBACKEND) + if os.path.isfile(dbid_path): + with open(dbid_path) as fp: + dbid = fp.read().strip() + else: + dbid = "bsddb" + + db = make_database(dbid) + db.disable_signals() + if db.requires_login() and username 
is None: + login = GrampsLoginDialog(self.uistate) + credentials = login.run() + if credentials is None: + return + username, password = credentials + try: - db.load(filename, self._pulse_progress, + db.load(filename, self.uistate.pulse_progressbar, mode, force_schema_upgrade, - force_bsddb_upgrade, - force_bsddb_downgrade, - force_python_upgrade, username=username, password=password) if self.dbstate.is_open(): self.dbstate.db.close( - user=User(callback=self._pulse_progress, + user=User(callback=self.uistate.pulse_progressbar, uistate=self.uistate, dbstate=self.dbstate)) self.dbstate.change_database(db) break - except DbUpgradeRequiredError as msg: - if QuestionDialog2(_("Are you sure you want " + except (DbSupportedError, DbUpgradeRequiredError) as msg: + if(force_schema_upgrade or + QuestionDialog2(_("Are you sure you want " "to upgrade this Family Tree?"), str(msg), _("I have made a backup,\n" "please upgrade my Family Tree"), _("Cancel"), - parent=self.uistate.window).run(): + parent=self.uistate.window).run()): force_schema_upgrade = True - force_bsddb_upgrade = False - force_bsddb_downgrade = False - force_python_upgrade = False - else: - self.dbstate.no_database() - break - except BsddbUpgradeRequiredError as msg: - if QuestionDialog2(_("Are you sure you want " - "to upgrade this Family Tree?"), - str(msg), - _("I have made a backup,\n" - "please upgrade my Family Tree"), - _("Cancel"), - parent=self.uistate.window).run(): - force_schema_upgrade = False - force_bsddb_upgrade = True - force_bsddb_downgrade = False - force_python_upgrade = False - else: - self.dbstate.no_database() - break - except BsddbDowngradeRequiredError as msg: - if QuestionDialog2(_("Are you sure you want " - "to downgrade this Family Tree?"), - str(msg), - _("I have made a backup,\n" - "please downgrade my Family Tree"), - _("Cancel"), - parent=self.uistate.window).run(): - force_schema_upgrade = False - force_bsddb_upgrade = False - force_bsddb_downgrade = True - force_python_upgrade 
= False - else: - self.dbstate.no_database() - break - except PythonUpgradeRequiredError as msg: - if QuestionDialog2(_("Are you sure you want " - "to upgrade this Family Tree?"), - str(msg), - _("I have made a backup,\n" - "please upgrade my Family Tree"), - _("Cancel"), - parent=self.uistate.window).run(): - force_schema_upgrade = False - force_bsddb_upgrade = False - force_bsddb_downgrade = False - force_python_upgrade = True + make_zip_backup(filename) else: self.dbstate.no_database() break # Get here is there is an exception the while loop does not handle - except BsddbDowngradeError as msg: - self.dbstate.no_database() - self._warn( _("Cannot open database"), str(msg)) except DbVersionError as msg: self.dbstate.no_database() self._errordialog( _("Cannot open database"), str(msg)) except DbPythonError as msg: self.dbstate.no_database() self._errordialog( _("Cannot open database"), str(msg)) - except DbEnvironmentError as msg: - self.dbstate.no_database() - self._errordialog( _("Cannot open database"), str(msg)) - except PythonDowngradeError as msg: - self.dbstate.no_database() - self._warn( _("Cannot open database"), str(msg)) except DbConnectionError as msg: self.dbstate.no_database() self._warn(_("Cannot open database"), str(msg)) @@ -300,7 +236,8 @@ class DbLoader(CLIDbLoader): except Exception as newerror: self.dbstate.no_database() self._dberrordialog(str(newerror)) - self._end_progress() + + self.uistate.progress.hide() return True #------------------------------------------------------------------------- diff --git a/gramps/plugins/db/bsddb/__init__.py b/gramps/plugins/db/bsddb/__init__.py deleted file mode 100644 index ac9b07863..000000000 --- a/gramps/plugins/db/bsddb/__init__.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2000-2007 Donald N. 
Allingham -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -""" -Gramps Database API. - -Database Architecture -===================== - -Access to the database is made through Python classes. Exactly -what functionality you have is dependent on the properties of the -database. For example, if you are accessing a read-only view, then -you will only have access to a subset of the methods available. - -At the root of any database interface is either :py:class:`.DbReadBase` and/or -:py:class:`.DbWriteBase`. These define the methods to read and write to a -database, respectively. - -The full database hierarchy is: - -- :py:class:`.DbBsddb` - read and write implementation to BSDDB databases - - * :py:class:`.DbWriteBase` - virtual and implementation-independent methods - for reading data - - * :py:class:`.DbBsddbRead` - read-only (accessors, getters) implementation - to BSDDB databases - - + :py:class:`.DbReadBase` - virtual and implementation-independent - methods for reading data - - + :py:class:`.Callback` - callback and signal functions - - * :py:class:`.UpdateCallback` - callback functionality - -DbBsddb -======= - -The :py:class:`.DbBsddb` interface defines a hierarchical database -(non-relational) written in -`PyBSDDB `_. 
There is no -such thing as a database schema, and the meaning of the data is -defined in the Python classes above. The data is stored as pickled -tuples and unserialized into the primary data types (below). - -More details can be found in the manual's -`Using database API `_. -""" - -from gramps.gen.db.base import * -from gramps.gen.db.dbconst import * -from .cursor import * -from .read import * -from .bsddbtxn import * -from gramps.gen.db.txn import * -from .undoredo import * -from gramps.gen.db.exceptions import * -from .write import * diff --git a/gramps/plugins/db/bsddb/bsddb.gpr.py b/gramps/plugins/db/bsddb/bsddb.gpr.py index 6ce179567..31e655fea 100644 --- a/gramps/plugins/db/bsddb/bsddb.gpr.py +++ b/gramps/plugins/db/bsddb/bsddb.gpr.py @@ -26,7 +26,7 @@ plg.id = 'bsddb' plg.name = _("BSDDB") plg.name_accell = _("_BSDDB Database") plg.description = _("Berkeley Software Distribution Database Backend") -plg.version = '1.0' +plg.version = '2.0' plg.gramps_target_version = "5.1" plg.status = STABLE plg.fname = 'bsddb.py' diff --git a/gramps/plugins/db/bsddb/bsddb.py b/gramps/plugins/db/bsddb/bsddb.py index cfae6c76e..33090c95d 100644 --- a/gramps/plugins/db/bsddb/bsddb.py +++ b/gramps/plugins/db/bsddb/bsddb.py @@ -1,7 +1,8 @@ # # Gramps - a GTK+/GNOME based genealogy program # -# Copyright (C) 2015-2016 Douglas S. Blank +# Copyright (C) 2020 Paul Culley +# Copyright (C) 2020 Nick Hall # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -17,7 +18,246 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# +""" BSDDB upgrade module """ +#------------------------------------------------------------------------ +# +# Python Modules +# +#------------------------------------------------------------------------ +import os +import pickle +import logging +from bsddb3.db import DB, DB_DUP, DB_HASH, DB_RDONLY +#------------------------------------------------------------------------- +# +# Gramps modules +# +#------------------------------------------------------------------------- +from gramps.plugins.db.dbapi.sqlite import SQLite +from gramps.cli.clidbman import NAME_FILE, find_next_db_dir +from gramps.gen.db.dbconst import DBBACKEND, DBMODE_W, SCHVERSFN +from gramps.gen.db.exceptions import (DbException, DbSupportedError, + DbUpgradeRequiredError, DbVersionError) +from gramps.gen.db.utils import clear_lock_file +from gramps.gen.lib import Researcher +from gramps.gen.const import GRAMPS_LOCALE as glocale +from gramps.gen.updatecallback import UpdateCallback +_ = glocale.translation.gettext + +LOG = logging.getLogger(".upgrade") +_MINVERSION = 9 +_DBVERSION = 19 -from gramps.plugins.db.bsddb import DbBsddb +class DbBsddb(SQLite): + """ + Gramps BSDDB Converter + """ + def __init__(self): + """Create a new GrampsDB.""" + super().__init__() + + def load(self, dirname, callback=None, mode=DBMODE_W, + force_schema_upgrade=False, + update=True, + username=None, password=None): + """ + Here we create a sqlite db, and copy the bsddb into it. + The new db is initially in a new directory, when we finish the copy + we replace the contents of the original directory with the new db. + + We alway raise an exception to complete this, as the new db still + needs to be upgraded some more. When we raise the exception, the new + db is closed. 
+ """ + if not update: + raise DbException("Not Available") + if not force_schema_upgrade: # make sure user wants to upgrade + raise DbSupportedError(_("BSDDB")) + + UpdateCallback.__init__(self, callback) + + # Here we open the dbapi db (a new one) for writing + new_path = find_next_db_dir() + os.mkdir(new_path) + # store dbid in new dir + dbid = 'sqlite' + backend_path = os.path.join(new_path, DBBACKEND) + with open(backend_path, "w", encoding='utf8') as backend_file: + backend_file.write(dbid) + + super().load(new_path, callback=None, mode='w', + force_schema_upgrade=False, + username=username, password=password) + + # now read in the bsddb and copy to dpapi + schema_vers = None + total = 0 + tables = ( + ('person', 'person'), + ('family', 'family'), + ('event', 'event'), + ('place', 'place'), + ('repo', 'repository'), + ('source', 'source'), + ('citation', 'citation'), + ('media', 'media'), + ('note', 'note'), + ('tag', 'tag'), + ('meta_data', 'metadata')) + + # open each dbmap, and get its length for the total + file_name = os.path.join(dirname, 'name_group.db') + if os.path.isfile(file_name): + name_group_dbmap = DB() + name_group_dbmap.set_flags(DB_DUP) + name_group_dbmap.open(file_name, 'name_group', DB_HASH, DB_RDONLY) + total += len(name_group_dbmap) + else: + name_group_dbmap = None + + table_list = [] + for old_t, new_t in (tables): + + file_name = os.path.join(dirname, old_t + '.db') + if not os.path.isfile(file_name): + continue + dbmap = DB() + dbmap.open(file_name, old_t, DB_HASH, DB_RDONLY) + total += len(dbmap) + table_list.append((old_t, new_t, dbmap)) + + self.set_total(total) + # copy data from each dbmap to sqlite table + for old_t, new_t, dbmap in table_list: + self._txn_begin() + if new_t == 'metadata': + sql = ("REPLACE INTO metadata (setting, value) VALUES " + "(?, ?)") + else: + sql = ("INSERT INTO %s (handle, blob_data) VALUES " + "(?, ?)" % new_t) + + for key in dbmap.keys(): + self.update() + data = pickle.loads(dbmap[key], 
encoding='utf-8') + + if new_t == 'metadata': + if key == b'version': + # found a schema version in metadata + schema_vers = data + elif key == b'researcher': + if len(data[0]) == 7: # Pre-3.3 format + # Upgrade researcher data to include a locality + # field in the address. + addr = tuple([data[0][0], ''] + list(data[0][1:])) + new_data = (addr, data[1], data[2], data[3]) + else: + new_data = data + data = Researcher().unserialize(new_data) + elif key == b'name_formats': + # upgrade formats if they were saved in the old way + for format_ix in range(len(data)): + fmat = data[format_ix] + if len(fmat) == 3: + fmat = fmat + (True,) + data[format_ix] = fmat + elif key == b'gender_stats': + # data is a dict, containing entries (see GenderStats) + self.dbapi.execute("DELETE FROM gender_stats") + g_sql = ("INSERT INTO gender_stats " + "(given_name, female, male, unknown) " + "VALUES (?, ?, ?, ?)") + for name in data: + female, male, unknown = data[name] + self.dbapi.execute(g_sql, + [name, female, male, unknown]) + continue # don't need this in metadata anymore + elif key == b'default': + # convert to string and change key + if isinstance(data, bytes): + data = data.decode('utf-8') + key = b'default-person-handle' + elif key == b'mediapath': + # change key + key = b'media-path' + elif key in [b'surname_list', # created by db now + b'pevent_names', # obsolete + b'fevent_names']: # obsolete + continue + elif (b'_names' in key or b'refs' in key or + b'_roles' in key or b'rels' in key or + b'_types' in key): + # These are list, but need to be set + data = set(data) + + self.dbapi.execute(sql, + [key.decode('utf-8'), + pickle.dumps(data)]) + + # get schema version from file if not in metadata + if new_t == 'metadata' and schema_vers is None: + versionpath = os.path.join(dirname, str(SCHVERSFN)) + if os.path.isfile(versionpath): + with open(versionpath, "r") as version_file: + schema_vers = int(version_file.read().strip()) + else: + schema_vers = 0 + # and put schema version 
into metadata + self.dbapi.execute(sql, ["version", schema_vers]) + self._txn_commit() + dbmap.close() + if new_t == 'metadata' and schema_vers < _MINVERSION: + raise DbVersionError(schema_vers, _MINVERSION, _DBVERSION) + + if name_group_dbmap: + self._txn_begin() + for key in name_group_dbmap.keys(): + self.update() + # name_group data (grouping) is NOT pickled + data = name_group_dbmap[key] + name = key.decode('utf-8') + grouping = data.decode('utf-8') + self.dbapi.execute( + "INSERT INTO name_group (name, grouping) VALUES (?, ?)", + [name, grouping]) + self._txn_commit() + name_group_dbmap.close() + + # done with new sqlite db, close it. Cannot use normal close as it + # overwrites the metadata. + self._close() + try: + clear_lock_file(self.get_save_path()) + except IOError: + pass + self.db_is_open = False + self._directory = None + + # copy tree name to new dir + old_db_name = os.path.join(dirname, NAME_FILE) + db_name = os.path.join(new_path, NAME_FILE) + with open(old_db_name, "r", encoding='utf8') as _file: + name = _file.read().strip() + with open(db_name, "w", encoding='utf8') as _file: + _file.write(name) + # remove files from old dir + for filename in os.listdir(dirname): + file_path = os.path.join(dirname, filename) + try: + os.unlink(file_path) + except Exception as e: + LOG.error('Failed to delete %s. Reason: %s' % (file_path, e)) + # copy new db files to old dir + for filename in os.listdir(new_path): + old_file_path = os.path.join(new_path, filename) + file_path = os.path.join(dirname, filename) + try: + os.replace(old_file_path, file_path) + except Exception as e: + LOG.error('Failed to move %s. 
Reason: %s' % (old_file_path, e)) + os.rmdir(new_path) + + # done preparing new db, but we still need to finish schema upgrades + raise DbUpgradeRequiredError(schema_vers, 'xx') diff --git a/gramps/plugins/db/bsddb/bsddbtxn.py b/gramps/plugins/db/bsddb/bsddbtxn.py deleted file mode 100644 index 360c8b7dd..000000000 --- a/gramps/plugins/db/bsddb/bsddbtxn.py +++ /dev/null @@ -1,239 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2009 Gerald W. Britton -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
-# - -""" -BSDDBTxn class: Wrapper for BSDDB transaction-oriented methods -""" - -#------------------------------------------------------------------------- -# -# Standard python modules -# -#------------------------------------------------------------------------- -import logging -import inspect -import os - -#------------------------------------------------------------------------- -# -# Gramps modules -# -#------------------------------------------------------------------------- -from gramps.gen.db.dbconst import DBLOGNAME -_LOG = logging.getLogger(DBLOGNAME) - -#------------------------------------------------------------------------- -# -# BSDDBTxn -# -#------------------------------------------------------------------------- - -class BSDDBTxn: - """ - Wrapper for BSDDB methods that set up and manage transactions. Implements - context management functionality allowing constructs like: - - with BSDDBTxn(env) as txn: - DB.get(txn=txn) - DB.put(txn=txn) - DB.delete(txn=txn) - - and other transaction-oriented DB access methods, where "env" is a - BSDDB DBEnv object and "DB" is a BSDDB database object. - - Transactions are automatically begun when the "with" statement is executed - and automatically committed when control flows off the end of the "with" - statement context, either implicitly by reaching the end of the indentation - level or explicity if a "return" statement is encountered or an exception - is raised. - """ - - __slots__ = ['env', 'db', 'txn', 'parent'] - - def __init__(self, env, db=None): - """ - Initialize transaction instance - """ - # Conditional on __debug__ because all that frame stuff may be slow - if __debug__: - caller_frame = inspect.stack()[1] - _LOG.debug(" BSDDBTxn %s instantiated. 
Called from file %s," - " line %s, in %s" % - ((hex(id(self)),)+ - (os.path.split(caller_frame[1])[1],)+ - (tuple(caller_frame[i] for i in range(2, 4))) - ) - ) - self.env = env - self.db = db - self.txn = None - - # Context manager methods - - def __enter__(self, parent=None, **kwargs): - """ - Context manager entry method - - Begin the transaction - """ - _LOG.debug(" BSDDBTxn %s entered" % hex(id(self))) - self.txn = self.begin(parent, **kwargs) - self.parent = parent - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """ - Context manager exit function - - Commit the transaction if no exception occurred - """ - _LOG.debug(" BSDDBTxn %s exited" % hex(id(self))) - if exc_type is not None: - return False - if self.txn: - self.commit() - return True - - # Methods implementing txn_ methods in DBEnv - - def begin(self, *args, **kwargs): - """ - Create and begin a new transaction. A DBTxn object is returned - """ - _LOG.debug(" BSDDBTxn %s begin" % hex(id(self))) - _LOG.debug(" BSDDBTxn %s calls %s %s txn_begin" % - (hex(id(self)), self.env.__class__.__name__, - hex(id(self.env))) - ) - self.txn = self.env.txn_begin(*args, **kwargs) - return self.txn - - def checkpoint(self, *args, **kwargs): - """ - Flush the underlying memory pool, write a checkpoint record to the - log and then flush the log - """ - if self.env: - self.env.txn_checkpoint(*args, **kwargs) - - def stat(self): - """ - Return a dictionary of transaction statistics - """ - if self.env: - return self.env.txn_stat() - - def recover(self): - """ - Returns a list of tuples (GID, TXN) of transactions prepared but - still unresolved - """ - if self.env: - return self.env.txn_recover() - - # Methods implementing DBTxn methods - - def abort(self): - """ - Abort the transaction - """ - if self.txn: - self.txn.abort() - self.txn = None - - def commit(self, flags=0): - """ - End the transaction, committing any changes to the databases - """ - _LOG.debug(" BSDDBTxn %s commit" % hex(id(self))) - if 
self.txn: - self.txn.commit(flags) - self.txn = None - - def id(self): - """ - Return the unique transaction id associated with the specified - transaction - """ - if self.txn: - return self.txn.id() - - def prepare(self, gid): - """ - Initiate the beginning of a two-phase commit - """ - if self.txn: - self.txn.prepare(gid) - - def discard(self): - """ - Release all the per-process resources associated with the specified - transaction, neither committing nor aborting the transaction - """ - if self.txn: - self.txn.discard() - self.txn = None - - # Methods implementing DB methods within the transaction context - - def get(self, key, default=None, txn=None, **kwargs): - """ - Returns the data object associated with key - """ - return self.db.get(key, default, txn or self.txn, **kwargs) - - def pget(self, key, default=None, txn=None, **kwargs): - """ - Returns the primary key, given the secondary one, and associated data - """ - return self.db.pget(key, default, txn or self.txn, **kwargs) - - def put(self, key, data, txn=None, **kwargs): - """ - Stores the key/data pair in the database - """ - return self.db.put(key, data, txn or self.txn, **kwargs) - - def delete(self, key, txn=None, **kwargs): - """ - Removes a key/data pair from the database - """ - self.db.delete(key, txn or self.txn, **kwargs) - -# test code -if __name__ == "__main__": - print("1") - from bsddb3 import db, dbshelve - print("2") - x = db.DBEnv() - print("3") - x.open('/tmp', db.DB_CREATE | db.DB_PRIVATE |\ - db.DB_INIT_MPOOL |\ - db.DB_INIT_LOG | db.DB_INIT_TXN) - print("4") - d = dbshelve.DBShelf(x) - print("5") - #from tran import BSDDBTxn as T - print("6") - T = BSDDBTxn - with T(x) as tx: - print("stat", tx.stat()) - print("id", tx.id()) - tx.checkpoint() diff --git a/gramps/plugins/db/bsddb/cursor.py b/gramps/plugins/db/bsddb/cursor.py deleted file mode 100644 index 1eecffc03..000000000 --- a/gramps/plugins/db/bsddb/cursor.py +++ /dev/null @@ -1,134 +0,0 @@ -# -# Gramps - a GTK+/GNOME based 
genealogy program -# -# Copyright (C) 2000-2007 Donald N. Allingham -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# gen/db/cursor.py - -#------------------------------------------------------------------------- -# -# Standard python modules -# -#------------------------------------------------------------------------- -from pickle import dumps, loads - -try: - from bsddb3 import db -except: - # FIXME: make this more abstract to deal with other backends - class db: - DB_RMW = 0 - DB_FIRST = 0 - DB_LAST = 0 - DB_CURRENT = 0 - DB_PREV = 0 - DB_NEXT = 0 - -#------------------------------------------------------------------------- -# -# BsddbBaseCursor class -# -#------------------------------------------------------------------------- - -class BsddbBaseCursor: - """ - Provide a basic iterator that allows the user to cycle through - the elements in a particular map. - - A cursor should never be directly instantiated. Instead, in should be - created by the database class. - - A cursor should only be used for a single pass through the - database. If multiple passes are needed, multiple cursors - should be used. - """ - - def __init__(self, txn=None, update=False, commit=False): - """ - Instantiate the object. 
Note, this method should be overridden in - derived classes that properly set self.cursor and self.source - """ - self.cursor = self.source = None - self.txn = txn - self._update = update - self.commit = commit - - def __getattr__(self, name): - """ - Return a method from the underlying cursor object, if it exists - """ - return getattr(self.cursor, name) - - def __enter__(self): - """ - Context manager enter method - """ - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """ - Context manager exit method - """ - self.close() - if self.txn and self.commit: - self.txn.commit() - return exc_type is None - - def __iter__(self): - """ - Iterator - """ - - data = self.first() - _n = self.next # Saved attribute lookup in the loop - while data: - yield data - data = _n() - - def _get(_flags=0): - """ Closure that returns a cursor get function """ - - def get(self, flags=0, **kwargs): - """ - Issue DBCursor get call (with DB_RMW flag if update requested) - Return results to caller - """ - data = self.cursor.get( - _flags | flags | (db.DB_RMW if self._update else 0), - **kwargs) - - return (data[0].decode('utf-8'), loads(data[1])) if data else None - - return get - - # Use closure to define access methods - - current = _get(db.DB_CURRENT) - first = _get(db.DB_FIRST) - ##python2 iterator - next = _get(db.DB_NEXT) - ##python3 iterator - __next__ = _get(db.DB_NEXT) - last = _get(db.DB_LAST) - prev = _get(db.DB_PREV) - - def update(self, key, data, flags=0, **kwargs): - """ - Write the current key, data pair to the database. - """ - self.cursor.put(key, dumps(data), flags=flags | db.DB_CURRENT, - **kwargs) diff --git a/gramps/plugins/db/bsddb/read.py b/gramps/plugins/db/bsddb/read.py deleted file mode 100644 index 57906b795..000000000 --- a/gramps/plugins/db/bsddb/read.py +++ /dev/null @@ -1,1948 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2000-2007 Donald N. 
Allingham -# Copyright (C) 2010 Nick Hall -# Copyright (C) 2011 Tim G L Lyons -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -""" -Read classes for the Gramps databases. -""" - -#------------------------------------------------------------------------- -# -# libraries -# -#------------------------------------------------------------------------- -import pickle -import time -import random -import os -from sys import maxsize -from operator import itemgetter -import ast -from functools import partial - -try: - from bsddb3 import db -except: - # FIXME: make this more abstract to deal with other backends - class db: - DBRunRecoveryError = 0 - DBAccessError = 0 - DBPageNotFoundError = 0 - DBInvalidArgError = 0 - -import re -import logging - -#------------------------------------------------------------------------- -# -# Gramps libraries -# -#------------------------------------------------------------------------- -from gramps.gen.lib.media import Media -from gramps.gen.lib.person import Person -from gramps.gen.lib.family import Family -from gramps.gen.lib.src import Source -from gramps.gen.lib.citation import Citation -from gramps.gen.lib.event import Event -from gramps.gen.lib.place import Place -from gramps.gen.lib.repo import Repository -from gramps.gen.lib.note import Note -from gramps.gen.lib.tag 
import Tag -from gramps.gen.lib.genderstats import GenderStats -from gramps.gen.lib.researcher import Researcher -from gramps.gen.lib.nameorigintype import NameOriginType - -from gramps.gen.utils.callback import Callback -from . import BsddbBaseCursor -from gramps.gen.db.base import DbReadBase -from gramps.gen.db.bookmarks import DbBookmarks -from gramps.gen.utils.id import create_id -from gramps.gen.errors import DbError, HandleError -from gramps.gen.constfunc import get_env_var -from gramps.gen.const import GRAMPS_LOCALE as glocale -_ = glocale.translation.gettext - -from gramps.gen.db.dbconst import * - -LOG = logging.getLogger(DBLOGNAME) -LOG = logging.getLogger(".citation") -#------------------------------------------------------------------------- -# -# constants -# -#------------------------------------------------------------------------- - -_SIGBASE = ('person', 'family', 'source', 'citation', - 'event', 'media', 'place', 'repository', - 'reference', 'note', 'tag') - -DBERRS = (db.DBRunRecoveryError, db.DBAccessError, - db.DBPageNotFoundError, db.DBInvalidArgError) - -#------------------------------------------------------------------------- -# -# Helper functions -# -#------------------------------------------------------------------------- -def find_byte_surname(key, data): - """ - Creating a surname from raw data of a person, to use for sort and index - returns a byte string - """ - surn = __index_surname(data[3][5]) - # in python 3 we work with unicode internally, but need byte function sometimes - return surn.encode('utf-8') - -def find_fullname(key, data): - """ - Creating a fullname from raw data of a person, to use for sort and index - """ - # data[3] -> primary_name - # data[3][4] -> primary given - # data[3][5] -> surname_list - # data[3][5][0] -> primary surnameobj - # data[3][5][0][0] -> primary surname - # data[3][5][0][1] -> primary surname prefix - # data[3][5][0][2] -> primary surname primary (bool) - # data[3][5][0][3] -> primary surname 
origin type - # data[3][5][0][4] -> primary surname connector - # [(surname + ' ' + given, - # surname prefix, - # surname primary, - # surname origin type, - # surname connector)] - if data[3][5]: # if Surname available - fullname_data = [(data[3][5][0][0] + ' ' + data[3][4], # combined - data[3][5][0][1], data[3][5][0][2], - data[3][5][0][3], data[3][5][0][4])] - else: # Some importers don't add any Surname at all - fullname_data = [(' ' + data[3][4], '', True, (1, ''), '')] - # ignore if origin type is PATRONYMIC or MATRONYMIC - return __index_surname(fullname_data) - -def find_surname(key, data): - """ - Creating a surname from raw data of a person, to use for sort and index - """ - # data[3][5] -> surname_list - return __index_surname(data[3][5]) - -def find_surname_name(key, data): - """ - Creating a surname from raw name, to use for sort and index - """ - return __index_surname(data[5]) - -def __index_surname(surn_list): - """ - All non pa/matronymic surnames are used in indexing. - pa/matronymic not as they change for every generation! 
- """ - if surn_list: - surn = " ".join([x[0] for x in surn_list if not (x[3][0] in [ - NameOriginType.PATRONYMIC, NameOriginType.MATRONYMIC]) ]) - else: - surn = "" - return surn - -#------------------------------------------------------------------------- -# -# GrampsDBReadCursor -# -#------------------------------------------------------------------------- -class DbReadCursor(BsddbBaseCursor): - - def __init__(self, source, txn=None, **kwargs): - BsddbBaseCursor.__init__(self, txn=txn, **kwargs) - self.cursor = source.db.cursor(txn) - self.source = source - -#------------------------------------------------------------------------- -# -# DbBsddbTreeCursor -# -#------------------------------------------------------------------------- -class DbBsddbTreeCursor(BsddbBaseCursor): - - def __init__(self, source, primary, readonly, txn=None, **kwargs): - BsddbBaseCursor.__init__(self, txn=txn, **kwargs) - self.cursor = source.cursor(txn) - self.source = source - self.primary = primary - self.readonly = readonly - - def __iter__(self): - """ - Iterator - """ - _n = self.next_dup - to_do = [''] - while to_do: - key = to_do.pop() - data = self.set(key.encode('utf-8')) - while data: - ### FIXME: this is a dirty hack that works without no - ### sensible explanation. For some reason, for a readonly - ### database, secondary index returns a primary table key - ### corresponding to the data, not the data. - if self.readonly: - payload = self.primary.get(data[1], txn=self.txn) - else: - payload = pickle.loads(data[1]) - yield (payload[0], payload) - to_do.append(payload[0]) - data = _n() - -class DbBsddbRead(DbReadBase, Callback): - """ - Read class for the Gramps databases. Implements methods necessary to read - the various object classes. 
Currently, there are nine (9) classes: - - :py:class:`.Person`, :py:class:`.Family`, :py:class:`.Event`, - :py:class:`.Place`, :py:class:`.Source`, - :py:class:`Citation <.lib.citation.Citation>`, :py:class:`.Media`, - :py:class:`.Repository` and :py:class:`.Note` - - For each object class, there are methods to retrieve data in various ways. - In the methods described below, can be one of person, family, - event, place, source, media, respository or note unless otherwise - specified. - - .. method:: get__from_handle() - - returns an object given its handle - - .. method:: get__from_gramps_id() - - returns an object given its gramps id - - .. method:: get__cursor() - - returns a cursor over an object. Example use:: - - with get_person_cursor() as cursor: - for handle, person in cursor: - # process person object pointed to by the handle - - .. method:: get__handles() - - returns a list of handles for the object type, optionally sorted - (for Citation, Family, Media, Person, Place, Source, and Tag objects) - - .. method:: iter__handles() - - returns an iterator that yields one object handle per call. - - .. method:: iter_() - - returns an iterator that yields one object per call. - The objects available are: people, families, events, places, - sources, media, repositories and notes. - - .. method:: get__event_types() - - returns a list of all Event types assocated with instances of - in the database. - - .. method:: get__attribute_types() - - returns a list of all Event types assocated with instances of - in the database. - """ - - __signals__ = {} - # If this is True logging will be turned on. - try: - _LOG_ALL = int(get_env_var('GRAMPS_SIGNAL', "0")) == 1 - except: - _LOG_ALL = False - - def __init__(self): - """ - Create a new DbBsddbRead instance. 
- """ - DbReadBase.__init__(self) - Callback.__init__(self) - - self.set_person_id_prefix('I%04d') - self.set_media_id_prefix('O%04d') - self.set_family_id_prefix('F%04d') - self.set_source_id_prefix('S%04d') - self.set_citation_id_prefix('C%04d') - self.set_place_id_prefix('P%04d') - self.set_event_id_prefix('E%04d') - self.set_repository_id_prefix('R%04d') - self.set_note_id_prefix('N%04d') - - self.readonly = False - self.rand = random.Random(time.time()) - self.smap_index = 0 - self.cmap_index = 0 - self.emap_index = 0 - self.pmap_index = 0 - self.fmap_index = 0 - self.lmap_index = 0 - self.omap_index = 0 - self.rmap_index = 0 - self.nmap_index = 0 - self.db_is_open = False - - self.event_names = set() - self.individual_attributes = set() - self.family_attributes = set() - self.source_attributes = set() - self.child_ref_types = set() - self.family_rel_types = set() - self.event_role_names = set() - self.name_types = set() - self.origin_types = set() - self.repository_types = set() - self.note_types = set() - self.source_media_types = set() - self.url_types = set() - self.media_attributes = set() - self.place_types = set() - - self.open = 0 - self.genderStats = GenderStats() - - self.undodb = [] - self.id_trans = {} - self.fid_trans = {} - self.pid_trans = {} - self.sid_trans = {} - self.cid_trans = {} - self.oid_trans = {} - self.rid_trans = {} - self.nid_trans = {} - self.eid_trans = {} - self.tag_trans = {} - self.env = None - self.person_map = {} - self.family_map = {} - self.place_map = {} - self.source_map = {} - self.citation_map = {} - self.repository_map = {} - self.note_map = {} - self.tag_map = {} - self.media_map = {} - self.event_map = {} - self.metadata = {} - self.name_group = {} - self.undo_callback = None - self.redo_callback = None - self.undo_history_callback = None - self.modified = 0 - - #self.undoindex = -1 - #self.translist = [None] * DBUNDO - self.abort_possible = True - #self.undo_history_timestamp = 0 - self.default = None - self.owner 
= Researcher() - self.name_formats = [] - self.bookmarks = DbBookmarks() - self.family_bookmarks = DbBookmarks() - self.event_bookmarks = DbBookmarks() - self.place_bookmarks = DbBookmarks() - self.source_bookmarks = DbBookmarks() - self.citation_bookmarks = DbBookmarks() - self.repo_bookmarks = DbBookmarks() - self.media_bookmarks = DbBookmarks() - self.note_bookmarks = DbBookmarks() - self._bm_changes = 0 - self.path = "" - self.surname_list = [] - self.txn = None - self.has_changed = False - - self.__tables = { - 'Person': - { - "handle_func": self.get_person_from_handle, - "gramps_id_func": self.get_person_from_gramps_id, - "class_func": Person, - "cursor_func": self.get_person_cursor, - "handles_func": self.get_person_handles, - "iter_func": self.iter_people, - }, - 'Family': - { - "handle_func": self.get_family_from_handle, - "gramps_id_func": self.get_family_from_gramps_id, - "class_func": Family, - "cursor_func": self.get_family_cursor, - "handles_func": self.get_family_handles, - "iter_func": self.iter_families, - }, - 'Source': - { - "handle_func": self.get_source_from_handle, - "gramps_id_func": self.get_source_from_gramps_id, - "class_func": Source, - "cursor_func": self.get_source_cursor, - "handles_func": self.get_source_handles, - "iter_func": self.iter_sources, - }, - 'Citation': - { - "handle_func": self.get_citation_from_handle, - "gramps_id_func": self.get_citation_from_gramps_id, - "class_func": Citation, - "cursor_func": self.get_citation_cursor, - "handles_func": self.get_citation_handles, - "iter_func": self.iter_citations, - }, - 'Event': - { - "handle_func": self.get_event_from_handle, - "gramps_id_func": self.get_event_from_gramps_id, - "class_func": Event, - "cursor_func": self.get_event_cursor, - "handles_func": self.get_event_handles, - "iter_func": self.iter_events, - }, - 'Media': - { - "handle_func": self.get_media_from_handle, - "gramps_id_func": self.get_media_from_gramps_id, - "class_func": Media, - "cursor_func": 
self.get_media_cursor, - "handles_func": self.get_media_handles, - "iter_func": self.iter_media, - }, - 'Place': - { - "handle_func": self.get_place_from_handle, - "gramps_id_func": self.get_place_from_gramps_id, - "class_func": Place, - "cursor_func": self.get_place_cursor, - "handles_func": self.get_place_handles, - "iter_func": self.iter_places, - }, - 'Repository': - { - "handle_func": self.get_repository_from_handle, - "gramps_id_func": self.get_repository_from_gramps_id, - "class_func": Repository, - "cursor_func": self.get_repository_cursor, - "handles_func": self.get_repository_handles, - "iter_func": self.iter_repositories, - }, - 'Note': - { - "handle_func": self.get_note_from_handle, - "gramps_id_func": self.get_note_from_gramps_id, - "class_func": Note, - "cursor_func": self.get_note_cursor, - "handles_func": self.get_note_handles, - "iter_func": self.iter_notes, - }, - 'Tag': - { - "handle_func": self.get_tag_from_handle, - "gramps_id_func": None, - "class_func": Tag, - "cursor_func": self.get_tag_cursor, - "handles_func": self.get_tag_handles, - "iter_func": self.iter_tags, - } - } - - def _get_table_func(self, table=None, func=None): - """ - Private implementation of get_table_func. 
- """ - if table is None: - return list(self.__tables.keys()) - elif func is None: - return self.__tables[table] - elif func in self.__tables[table].keys(): - return self.__tables[table][func] - else: - return None - - def set_prefixes(self, person, media, family, source, citation, place, - event, repository, note): - self.set_person_id_prefix(person) - self.set_media_id_prefix(media) - self.set_family_id_prefix(family) - self.set_source_id_prefix(source) - self.set_citation_id_prefix(citation) - self.set_place_id_prefix(place) - self.set_event_id_prefix(event) - self.set_repository_id_prefix(repository) - self.set_note_id_prefix(note) - #self.set_tag_id_prefix(tag) - - def version_supported(self): - """Return True when the file has a supported version.""" - return True - - def get_cursor(self, table, *args, **kwargs): - try: - return DbReadCursor(table, self.txn) - except DBERRS as msg: - self.__log_error() - raise DbError(msg) - - def get_person_cursor(self, *args, **kwargs): - return self.get_cursor(self.person_map, *args, **kwargs) - - def get_family_cursor(self, *args, **kwargs): - return self.get_cursor(self.family_map, *args, **kwargs) - - def get_event_cursor(self, *args, **kwargs): - return self.get_cursor(self.event_map, *args, **kwargs) - - def get_place_cursor(self, *args, **kwargs): - return self.get_cursor(self.place_map, *args, **kwargs) - - def get_place_tree_cursor(self, *args, **kwargs): - return DbBsddbTreeCursor(self.parents, self.place_map, self.readonly, - self.txn) - - def get_source_cursor(self, *args, **kwargs): - return self.get_cursor(self.source_map, *args, **kwargs) - - def get_citation_cursor(self, *args, **kwargs): - return self.get_cursor(self.citation_map, *args, **kwargs) - - def get_media_cursor(self, *args, **kwargs): - return self.get_cursor(self.media_map, *args, **kwargs) - - def get_repository_cursor(self, *args, **kwargs): - return self.get_cursor(self.repository_map, *args, **kwargs) - - def get_note_cursor(self, *args, 
**kwargs): - return self.get_cursor(self.note_map, *args, **kwargs) - - def get_tag_cursor(self, *args, **kwargs): - return self.get_cursor(self.tag_map, *args, **kwargs) - - def close(self): - """ - Close the specified database. - - The method needs to be overridden in the derived class. - """ - #remove circular dependance - self.basedb = None - #remove links to functions - self.disconnect_all() -## self.bookmarks = None -## self.family_bookmarks = None -## self.event_bookmarks = None -## self.place_bookmarks = None -## self.source_bookmarks = None -## self.citation_bookmarks = None -## self.repo_bookmarks = None -## self.media_bookmarks = None -## self.note_bookmarks = None - - - def is_open(self): - """ - Return 1 if the database has been opened. - """ - return self.db_is_open - - def request_rebuild(self): - """ - Notify clients that the data has changed significantly, and that all - internal data dependent on the database should be rebuilt. - """ - self.emit('person-rebuild') - self.emit('family-rebuild') - self.emit('place-rebuild') - self.emit('source-rebuild') - self.emit('citation-rebuild') - self.emit('media-rebuild') - self.emit('event-rebuild') - self.emit('repository-rebuild') - self.emit('note-rebuild') - self.emit('tag-rebuild') - - def __find_next_gramps_id(self, prefix, map_index, trans): - """ - Helper function for find_next__gramps_id methods - """ - index = prefix % map_index - while trans.get(index.encode('utf-8'), txn=self.txn) is not None: - map_index += 1 - index = prefix % map_index - map_index += 1 - return (map_index, index) - - def find_next_person_gramps_id(self): - """ - Return the next available Gramps ID for a Person object based off the - person ID prefix. - """ - self.pmap_index, gid = self.__find_next_gramps_id(self.person_prefix, - self.pmap_index, self.id_trans) - return gid - - def find_next_place_gramps_id(self): - """ - Return the next available Gramps ID for a Place object based off the - place ID prefix. 
- """ - self.lmap_index, gid = self.__find_next_gramps_id(self.place_prefix, - self.lmap_index, self.pid_trans) - return gid - - def find_next_event_gramps_id(self): - """ - Return the next available Gramps ID for a Event object based off the - event ID prefix. - """ - self.emap_index, gid = self.__find_next_gramps_id(self.event_prefix, - self.emap_index, self.eid_trans) - return gid - - def find_next_media_gramps_id(self): - """ - Return the next available Gramps ID for a Media object based - off the media object ID prefix. - """ - self.omap_index, gid = self.__find_next_gramps_id(self.media_prefix, - self.omap_index, self.oid_trans) - return gid - - def find_next_source_gramps_id(self): - """ - Return the next available Gramps ID for a Source object based off the - source ID prefix. - """ - self.smap_index, gid = self.__find_next_gramps_id(self.source_prefix, - self.smap_index, self.sid_trans) - return gid - - def find_next_citation_gramps_id(self): - """ - Return the next available Gramps ID for a Source object based off the - source ID prefix. - """ - self.cmap_index, gid = self.__find_next_gramps_id(self.citation_prefix, - self.cmap_index, self.cid_trans) - return gid - - def find_next_family_gramps_id(self): - """ - Return the next available Gramps ID for a Family object based off the - family ID prefix. - """ - self.fmap_index, gid = self.__find_next_gramps_id(self.family_prefix, - self.fmap_index, self.fid_trans) - return gid - - def find_next_repository_gramps_id(self): - """ - Return the next available Gramps ID for a Respository object based - off the repository ID prefix. - """ - self.rmap_index, gid = self.__find_next_gramps_id(self.repository_prefix, - self.rmap_index, self.rid_trans) - return gid - - def find_next_note_gramps_id(self): - """ - Return the next available Gramps ID for a Note object based off the - note ID prefix. 
- """ - self.nmap_index, gid = self.__find_next_gramps_id(self.note_prefix, - self.nmap_index, self.nid_trans) - return gid - - def _get_from_handle(self, handle, class_type, data_map): - data = data_map.get(handle.encode('utf-8')) - if data: - newobj = class_type() - newobj.unserialize(data) - return newobj - raise HandleError('Handle %s not found' % handle) - - def get_person_from_handle(self, handle): - """ - Find a Person in the database from the passed handle. - - If no such Person exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Person, self.person_map) - - def get_source_from_handle(self, handle): - """ - Find a Source in the database from the passed handle. - - If no such Source exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Source, self.source_map) - - def get_citation_from_handle(self, handle): - """ - Find a Citation in the database from the passed handle. - - If no such Citation exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Citation, self.citation_map) - - def get_media_from_handle(self, handle): - """ - Find an Object in the database from the passed handle. - - If no such Object exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Media, self.media_map) - - def get_place_from_handle(self, handle): - """ - Find a Place in the database from the passed handle. - - If no such Place exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Place, self.place_map) - - def get_event_from_handle(self, handle): - """ - Find a Event in the database from the passed handle. - - If no such Event exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Event, self.event_map) - - def get_family_from_handle(self, handle): - """ - Find a Family in the database from the passed handle. - - If no such Family exists, a HandleError is raised. 
- """ - return self._get_from_handle(handle, Family, self.family_map) - - def get_repository_from_handle(self, handle): - """ - Find a Repository in the database from the passed handle. - - If no such Repository exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Repository, self.repository_map) - - def get_note_from_handle(self, handle): - """ - Find a Note in the database from the passed handle. - - If no such Note exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Note, self.note_map) - - def get_tag_from_handle(self, handle): - """ - Find a Tag in the database from the passed handle. - - If no such Tag exists, a HandleError is raised. - """ - return self._get_from_handle(handle, Tag, self.tag_map) - - def __get_obj_from_gramps_id(self, val, tbl, class_, prim_tbl): - if isinstance(tbl, dict): - return None ## trying to get object too early - if val is None: - return None - try: - data = tbl.get(val.encode('utf-8'), txn=self.txn) - if data is not None: - obj = class_() - ### FIXME: this is a dirty hack that works without no - ### sensible explanation. For some reason, for a readonly - ### database, secondary index returns a primary table key - ### corresponding to the data, not the data. - if self.readonly: - tuple_data = prim_tbl.get(data, txn=self.txn) - else: - tuple_data = pickle.loads(data) - obj.unserialize(tuple_data) - return obj - else: - return None - except DBERRS as msg: - self.__log_error() - raise DbError(msg) - - def get_person_from_gramps_id(self, val): - """ - Find a Person in the database from the passed Gramps ID. - - If no such Person exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.id_trans, Person, - self.person_map) - - def get_family_from_gramps_id(self, val): - """ - Find a Family in the database from the passed Gramps ID. - - If no such Family exists, None is return. 
- """ - return self.__get_obj_from_gramps_id(val, self.fid_trans, Family, - self.family_map) - - def get_event_from_gramps_id(self, val): - """ - Find an Event in the database from the passed Gramps ID. - - If no such Family exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.eid_trans, Event, - self.event_map) - - def get_place_from_gramps_id(self, val): - """ - Find a Place in the database from the passed Gramps ID. - - If no such Place exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.pid_trans, Place, - self.place_map) - - def get_source_from_gramps_id(self, val): - """ - Find a Source in the database from the passed Gramps ID. - - If no such Source exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.sid_trans, Source, - self.source_map) - - def get_citation_from_gramps_id(self, val): - """ - Find a Citation in the database from the passed Gramps ID. - - If no such Citation exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.cid_trans, Citation, - self.citation_map) - - def get_media_from_gramps_id(self, val): - """ - Find a Media in the database from the passed Gramps ID. - - If no such Media exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.oid_trans, Media, - self.media_map) - - def get_repository_from_gramps_id(self, val): - """ - Find a Repository in the database from the passed Gramps ID. - - If no such Repository exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.rid_trans, Repository, - self.repository_map) - - def get_note_from_gramps_id(self, val): - """ - Find a Note in the database from the passed Gramps ID. - - If no such Note exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.nid_trans, Note, - self.note_map) - - def get_tag_from_name(self, val): - """ - Find a Tag in the database from the passed Tag name. 
- - If no such Tag exists, None is returned. - """ - return self.__get_obj_from_gramps_id(val, self.tag_trans, Tag, - self.tag_map) - - def get_name_group_mapping(self, surname): - """ - Return the default grouping name for a surname. - Return type is a unicode object - """ - group = self.name_group.get(surname.encode('utf-8')) - if group is not None: - return group.decode('utf-8') - else: - return surname - - def get_name_group_keys(self): - """ - Return the defined names that have been assigned to a default grouping. - """ - return [ng.decode('utf-8') for ng in self.name_group.keys()] - - def has_name_group_key(self, name): - """ - Return if a key exists in the name_group table. - """ - # The use of has_key seems allright because there is no write lock - # on the name_group table when this is called. - name = name.encode('utf-8') - return name in self.name_group - - def get_number_of_records(self, table): - if not self.db_is_open: - return 0 - if self.txn is None: - return len(table) - else: - return table.stat(flags=db.DB_FAST_STAT, txn=self.txn)['nkeys'] - - def get_number_of_people(self): - """ - Return the number of people currently in the database. - """ - return self.get_number_of_records(self.person_map) - - def get_number_of_families(self): - """ - Return the number of families currently in the database. - """ - return self.get_number_of_records(self.family_map) - - def get_number_of_events(self): - """ - Return the number of events currently in the database. - """ - return self.get_number_of_records(self.event_map) - - def get_number_of_places(self): - """ - Return the number of places currently in the database. - """ - return self.get_number_of_records(self.place_map) - - def get_number_of_sources(self): - """ - Return the number of sources currently in the database. - """ - return self.get_number_of_records(self.source_map) - - def get_number_of_citations(self): - """ - Return the number of citations currently in the database. 
- """ - return self.get_number_of_records(self.citation_map) - - def get_number_of_media(self): - """ - Return the number of media objects currently in the database. - """ - return self.get_number_of_records(self.media_map) - - def get_number_of_repositories(self): - """ - Return the number of source repositories currently in the database. - """ - return self.get_number_of_records(self.repository_map) - - def get_number_of_notes(self): - """ - Return the number of notes currently in the database. - """ - return self.get_number_of_records(self.note_map) - - def get_number_of_tags(self): - """ - Return the number of tags currently in the database. - """ - return self.get_number_of_records(self.tag_map) - - def is_empty(self): - """ - Return true if there are no [primary] records in the database - """ - for obj_map in [self.person_map, self.family_map, self.event_map, - self.place_map, self.source_map, self.citation_map, - self.media_map, self.repository_map, self.note_map, - self.tag_map]: - if self.get_number_of_records(obj_map) > 0: - return False - return True - - def _all_handles(self, table): - """ - Return all the keys of a database table - """ - return [key.decode('utf-8') for key in table.keys(txn=self.txn)] - - def get_person_handles(self, sort_handles=False, locale=glocale): - """ - Return a list of database handles, one handle for each Person in - the database. - - If sort_handles is True, the list is sorted by surnames. - """ - if self.db_is_open: - handle_list = self._all_handles(self.person_map) - if sort_handles: - handle_list.sort(key=partial(self.__sortbyperson_key, - locale=locale)) - return handle_list - return [] - - def get_place_handles(self, sort_handles=False, locale=glocale): - """ - Return a list of database handles, one handle for each Place in - the database. - - If sort_handles is True, the list is sorted by Place title. 
- """ - - if self.db_is_open: - handle_list = self._all_handles(self.place_map) - if sort_handles: - handle_list.sort(key=partial(self.__sortbyplace_key, - locale=locale)) - return handle_list - return [] - - def get_source_handles(self, sort_handles=False, locale=glocale): - """ - Return a list of database handles, one handle for each Source in - the database. - - If sort_handles is True, the list is sorted by Source title. - """ - if self.db_is_open: - handle_list = self._all_handles(self.source_map) - if sort_handles: - handle_list.sort(key=partial(self.__sortbysource_key, - locale=locale)) - return handle_list - return [] - - def get_citation_handles(self, sort_handles=False, locale=glocale): - """ - Return a list of database handles, one handle for each Citation in - the database. - - If sort_handles is True, the list is sorted by Citation Volume/Page. - """ - if self.db_is_open: - handle_list = self._all_handles(self.citation_map) - if sort_handles: - handle_list.sort(key=partial(self.__sortbycitation_key, - locale=locale)) - return handle_list - return [] - - def get_media_handles(self, sort_handles=False, locale=glocale): - """ - Return a list of database handles, one handle for each Media in - the database. - - If sort_handles is True, the list is sorted by title. - """ - if self.db_is_open: - handle_list = self._all_handles(self.media_map) - if sort_handles: - handle_list.sort(key=partial(self.__sortbymedia_key, - locale=locale)) - return handle_list - return [] - - def get_event_handles(self): - """ - Return a list of database handles, one handle for each Event in the - database. - """ - if self.db_is_open: - return self._all_handles(self.event_map) - return [] - - def get_family_handles(self, sort_handles=False, locale=glocale): - """ - Return a list of database handles, one handle for each Family in - the database. - - If sort_handles is True, the list is sorted by surnames. 
- """ - if self.db_is_open: - handle_list = self._all_handles(self.family_map) - if sort_handles: - handle_list.sort(key=partial(self.__sortbyfamily_key, - locale=locale)) - return handle_list - return [] - - def get_repository_handles(self): - """ - Return a list of database handles, one handle for each Repository in - the database. - """ - if self.db_is_open: - return self._all_handles(self.repository_map) - return [] - - def get_note_handles(self): - """ - Return a list of database handles, one handle for each Note in the - database. - """ - if self.db_is_open: - return self._all_handles(self.note_map) - return [] - - def get_tag_handles(self, sort_handles=False, locale=glocale): - """ - Return a list of database handles, one handle for each Tag in - the database. - - If sort_handles is True, the list is sorted by Tag name. - """ - if self.db_is_open: - handle_list = self._all_handles(self.tag_map) - if sort_handles: - handle_list.sort(key=partial(self.__sortbytag_key, - locale=locale)) - return handle_list - return [] - - def _f(curs_): - """ - Closure that returns an iterator over handles in the database. - """ - def g(self): - with curs_(self) as cursor: - for key, data in cursor: - yield key - return g - - # Use closure to define iterators for each primary object type - - iter_person_handles = _f(get_person_cursor) - iter_family_handles = _f(get_family_cursor) - iter_event_handles = _f(get_event_cursor) - iter_place_handles = _f(get_place_cursor) - iter_source_handles = _f(get_source_cursor) - iter_citation_handles = _f(get_citation_cursor) - iter_media_handles = _f(get_media_cursor) - iter_repository_handles = _f(get_repository_cursor) - iter_note_handles = _f(get_note_cursor) - iter_tag_handles = _f(get_tag_cursor) - del _f - - def _f(curs_, obj_): - """ - Closure that returns an iterator over objects in the database. 
- """ - def g(self): - with curs_(self) as cursor: - for key, data in cursor: - obj = obj_() - obj.unserialize(data) - yield obj - return g - - # Use closure to define iterators for each primary object type - - iter_people = _f(get_person_cursor, Person) - iter_families = _f(get_family_cursor, Family) - iter_events = _f(get_event_cursor, Event) - iter_places = _f(get_place_cursor, Place) - iter_sources = _f(get_source_cursor, Source) - iter_citations = _f(get_citation_cursor, Citation) - iter_media = _f(get_media_cursor, Media) - iter_repositories = _f(get_repository_cursor, Repository) - iter_notes = _f(get_note_cursor, Note) - iter_tags = _f(get_tag_cursor, Tag) - del _f - - def find_initial_person(self): - person = self.get_default_person() - if not person: - the_ids = [] - for this_person in self.iter_people(): - the_ids.append(this_person.gramps_id) - if the_ids: - person = self.get_person_from_gramps_id(min(the_ids)) - return person - - @staticmethod - def _validated_id_prefix(val, default): - if val: - try: - str_ = val % 1 - except TypeError: # missing conversion specifier - prefix_var = val + "%d" - except ValueError: # incomplete format - prefix_var = default+"%04d" - else: - prefix_var = val # OK as given - else: - prefix_var = default+"%04d" # not a string or empty string - return prefix_var - - @staticmethod - def __id2user_format(id_pattern): - """ - Return a method that accepts a Gramps ID and adjusts it to the users - format. - """ - pattern_match = re.match(r"(.*)%[0 ](\d+)[diu]$", id_pattern) - if pattern_match: - str_prefix = pattern_match.group(1) - ##nr_width = pattern_match.group(2) - def closure_func(gramps_id): - if gramps_id and gramps_id.startswith(str_prefix): - id_number = gramps_id[len(str_prefix):] - if id_number.isdigit(): - id_value = int(id_number, 10) - ## this code never ran, as an int compared to str with > is False! -## if len(str(id_value)) > nr_width: -## # The ID to be imported is too large to fit in the -## # users format. 
For now just create a new ID, -## # because that is also what happens with IDs that -## # are identical to IDs already in the database. If -## # the problem of colliding import and already -## # present IDs is solved the code here also needs -## # some solution. -## gramps_id = id_pattern % 1 -## else: - gramps_id = id_pattern % id_value - return gramps_id - else: - def closure_func(gramps_id): - return gramps_id - return closure_func - - def set_person_id_prefix(self, val): - """ - Set the naming template for Gramps Person ID values. - - The string is expected to be in the form of a simple text string, or - in a format that contains a C/Python style format string using %d, - such as I%d or I%04d. - """ - self.person_prefix = self._validated_id_prefix(val, "I") - self.id2user_format = self.__id2user_format(self.person_prefix) - - def set_source_id_prefix(self, val): - """ - Set the naming template for Gramps Source ID values. - - The string is expected to be in the form of a simple text string, or - in a format that contains a C/Python style format string using %d, - such as S%d or S%04d. - """ - self.source_prefix = self._validated_id_prefix(val, "S") - self.sid2user_format = self.__id2user_format(self.source_prefix) - - def set_citation_id_prefix(self, val): - """ - Set the naming template for Gramps Citation ID values. - - The string is expected to be in the form of a simple text string, or - in a format that contains a C/Python style format string using %d, - such as C%d or C%04d. - """ - self.citation_prefix = self._validated_id_prefix(val, "C") - self.cid2user_format = self.__id2user_format(self.citation_prefix) - - def set_media_id_prefix(self, val): - """ - Set the naming template for Gramps Media ID values. - - The string is expected to be in the form of a simple text string, or - in a format that contains a C/Python style format string using %d, - such as O%d or O%04d. 
- """ - self.media_prefix = self._validated_id_prefix(val, "O") - self.oid2user_format = self.__id2user_format(self.media_prefix) - - def set_place_id_prefix(self, val): - """ - Set the naming template for Gramps Place ID values. - - The string is expected to be in the form of a simple text string, or - in a format that contains a C/Python style format string using %d, - such as P%d or P%04d. - """ - self.place_prefix = self._validated_id_prefix(val, "P") - self.pid2user_format = self.__id2user_format(self.place_prefix) - - def set_family_id_prefix(self, val): - """ - Set the naming template for Gramps Family ID values. The string is - expected to be in the form of a simple text string, or in a format - that contains a C/Python style format string using %d, such as F%d - or F%04d. - """ - self.family_prefix = self._validated_id_prefix(val, "F") - self.fid2user_format = self.__id2user_format(self.family_prefix) - - def set_event_id_prefix(self, val): - """ - Set the naming template for Gramps Event ID values. - - The string is expected to be in the form of a simple text string, or - in a format that contains a C/Python style format string using %d, - such as E%d or E%04d. - """ - self.event_prefix = self._validated_id_prefix(val, "E") - self.eid2user_format = self.__id2user_format(self.event_prefix) - - def set_repository_id_prefix(self, val): - """ - Set the naming template for Gramps Repository ID values. - - The string is expected to be in the form of a simple text string, or - in a format that contains a C/Python style format string using %d, - such as R%d or R%04d. - """ - self.repository_prefix = self._validated_id_prefix(val, "R") - self.rid2user_format = self.__id2user_format(self.repository_prefix) - - def set_note_id_prefix(self, val): - """ - Set the naming template for Gramps Note ID values. - - The string is expected to be in the form of a simple text string, or - in a format that contains a C/Python style format string using %d, - such as N%d or N%04d. 
- """ - self.note_prefix = self._validated_id_prefix(val, "N") - self.nid2user_format = self.__id2user_format(self.note_prefix) - - def get_surname_list(self): - """ - Return the list of locale-sorted surnames contained in the database. - """ - return self.surname_list - - def get_bookmarks(self): - """Return the list of Person handles in the bookmarks.""" - return self.bookmarks - - def get_family_bookmarks(self): - """Return the list of Person handles in the bookmarks.""" - return self.family_bookmarks - - def get_event_bookmarks(self): - """Return the list of Person handles in the bookmarks.""" - return self.event_bookmarks - - def get_place_bookmarks(self): - """Return the list of Person handles in the bookmarks.""" - return self.place_bookmarks - - def get_source_bookmarks(self): - """Return the list of Person handles in the bookmarks.""" - return self.source_bookmarks - - def get_citation_bookmarks(self): - """Return the list of Citation handles in the bookmarks.""" - return self.citation_bookmarks - - def get_media_bookmarks(self): - """Return the list of Person handles in the bookmarks.""" - return self.media_bookmarks - - def get_repo_bookmarks(self): - """Return the list of Person handles in the bookmarks.""" - return self.repo_bookmarks - - def get_note_bookmarks(self): - """Return the list of Note handles in the bookmarks.""" - return self.note_bookmarks - - def set_researcher(self, owner): - """Set the information about the owner of the database.""" - self.owner.set_from(owner) - - def get_researcher(self): - """ - Return the Researcher instance, providing information about the owner - of the database. 
- """ - return self.owner - - def get_default_person(self): - """Return the default Person of the database.""" - person_handle = self.get_default_handle() - if person_handle: - person = self.get_person_from_handle(person_handle) - if person: - return person - elif (self.metadata is not None) and (not self.readonly): - self.metadata[b'default'] = None - return None - else: - return None - - def get_default_handle(self): - """Return the default Person of the database.""" - if self.metadata is not None: - return self.metadata.get(b'default') - return None - - def get_save_path(self): - """Return the save path of the file, or "" if one does not exist.""" - return self.path - - def get_event_attribute_types(self): - """ - Return a list of all Attribute types assocated with Event instances - in the database. - """ - return list(self.event_attributes) - - def get_event_types(self): - """ - Return a list of all event types in the database. - """ - return list(self.event_names) - - def get_person_event_types(self): - """ - Deprecated: Use get_event_types - """ - return list(self.event_names) - - def get_person_attribute_types(self): - """ - Return a list of all Attribute types assocated with Person instances - in the database. - """ - return list(self.individual_attributes) - - def get_family_attribute_types(self): - """ - Return a list of all Attribute types assocated with Family instances - in the database. - """ - return list(self.family_attributes) - - def get_family_event_types(self): - """ - Deprecated: Use get_event_types - """ - return list(self.event_names) - - def get_media_attribute_types(self): - """ - Return a list of all Attribute types assocated with Media and MediaRef - instances in the database. - """ - return list(self.media_attributes) - - def get_family_relation_types(self): - """ - Return a list of all relationship types assocated with Family - instances in the database. 
- """ - return list(self.family_rel_types) - - def get_child_reference_types(self): - """ - Return a list of all child reference types assocated with Family - instances in the database. - """ - return list(self.child_ref_types) - - def get_event_roles(self): - """ - Return a list of all custom event role names assocated with Event - instances in the database. - """ - return list(self.event_role_names) - - def get_name_types(self): - """ - Return a list of all custom names types assocated with Person - instances in the database. - """ - return list(self.name_types) - - def get_origin_types(self): - """ - Return a list of all custom origin types assocated with Person/Surname - instances in the database. - """ - return list(self.origin_types) - - def get_repository_types(self): - """ - Return a list of all custom repository types assocated with Repository - instances in the database. - """ - return list(self.repository_types) - - def get_note_types(self): - """ - Return a list of all custom note types assocated with Note instances - in the database. - """ - return list(self.note_types) - - def get_source_attribute_types(self): - """ - Return a list of all Attribute types assocated with Source/Citation - instances in the database. - """ - return list(self.source_attributes) - - def get_source_media_types(self): - """ - Return a list of all custom source media types assocated with Source - instances in the database. - """ - return list(self.source_media_types) - - def get_url_types(self): - """ - Return a list of all custom names types assocated with Url instances - in the database. - """ - return list(self.url_types) - - def get_place_types(self): - """ - Return a list of all custom place types assocated with Place instances - in the database. 
- """ - return list(self.place_types) - - def __log_error(self): - pass - - def __get_raw_data(self, table, handle): - """ - Helper method for get_raw__data methods - """ - if table is None: - return None ## trying to get object too early - try: - return table.get(handle.encode('utf-8'), txn=self.txn) - except DBERRS as msg: - self.__log_error() - raise DbError(msg) - - def get_raw_person_data(self, handle): - return self.__get_raw_data(self.person_map, handle) - - def get_raw_family_data(self, handle): - return self.__get_raw_data(self.family_map, handle) - - def get_raw_media_data(self, handle): - return self.__get_raw_data(self.media_map, handle) - - def get_raw_place_data(self, handle): - return self.__get_raw_data(self.place_map, handle) - - def get_raw_event_data(self, handle): - return self.__get_raw_data(self.event_map, handle) - - def get_raw_source_data(self, handle): - return self.__get_raw_data(self.source_map, handle) - - def get_raw_citation_data(self, handle): - return self.__get_raw_data(self.citation_map, handle) - - def get_raw_repository_data(self, handle): - return self.__get_raw_data(self.repository_map, handle) - - def get_raw_note_data(self, handle): - return self.__get_raw_data(self.note_map, handle) - - def get_raw_tag_data(self, handle): - return self.__get_raw_data(self.tag_map, handle) - - def __has_handle(self, table, handle): - """ - Helper function for has__handle methods - """ - if handle is None: - return False - try: - return table.get(handle.encode('utf-8'), txn=self.txn) is not None - except DBERRS as msg: - self.__log_error() - raise DbError(msg) - - def has_person_handle(self, handle): - """ - Return True if the handle exists in the current Person database. - """ - return self.__has_handle(self.person_map, handle) - - def has_family_handle(self, handle): - """ - Return True if the handle exists in the current Family database. 
- """ - return self.__has_handle(self.family_map, handle) - - def has_media_handle(self, handle): - """ - Return True if the handle exists in the current Mediadatabase. - """ - return self.__has_handle(self.media_map, handle) - - def has_repository_handle(self, handle): - """ - Return True if the handle exists in the current Repository database. - """ - return self.__has_handle(self.repository_map, handle) - - def has_note_handle(self, handle): - """ - Return True if the handle exists in the current Note database. - """ - return self.__has_handle(self.note_map, handle) - - def has_event_handle(self, handle): - """ - Return True if the handle exists in the current Event database. - """ - return self.__has_handle(self.event_map, handle) - - def has_place_handle(self, handle): - """ - Return True if the handle exists in the current Place database. - """ - return self.__has_handle(self.place_map, handle) - - def has_source_handle(self, handle): - """ - Return True if the handle exists in the current Source database. - """ - return self.__has_handle(self.source_map, handle) - - def has_citation_handle(self, handle): - """ - Return True if the handle exists in the current Citation database. - """ - return self.__has_handle(self.citation_map, handle) - - def has_tag_handle(self, handle): - """ - Return True if the handle exists in the current Tag database. - """ - return self.__has_handle(self.tag_map, handle) - - def __has_gramps_id(self, id_map, gramps_id): - """ - Helper function for has__gramps_id methods - """ - if gramps_id is None: - return False - gramps_id = gramps_id.encode('utf-8') - try: - return id_map.get(gramps_id, txn=self.txn) is not None - except DBERRS as msg: - self.__log_error() - raise DbError(msg) - - def has_person_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Person table. 
- """ - return self.__has_gramps_id(self.id_trans, gramps_id) - - def has_family_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Family table. - """ - return self.__has_gramps_id(self.fid_trans, gramps_id) - - def has_media_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Media table. - """ - return self.__has_gramps_id(self.oid_trans, gramps_id) - - def has_repository_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Repository table. - """ - return self.__has_gramps_id(self.rid_trans, gramps_id) - - def has_note_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Note table. - """ - return self.__has_gramps_id(self.nid_trans, gramps_id) - - def has_event_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Event table. - """ - return self.__has_gramps_id(self.eid_trans, gramps_id) - - def has_place_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Place table. - """ - return self.__has_gramps_id(self.pid_trans, gramps_id) - - def has_source_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Source table. - """ - return self.__has_gramps_id(self.sid_trans, gramps_id) - - def has_citation_gramps_id(self, gramps_id): - """ - Return True if the Gramps ID exists in the Citation table. 
- """ - return self.__has_gramps_id(self.cid_trans, gramps_id) - - def __sortbyperson_key(self, handle, locale=glocale): - handle = handle.encode('utf-8') - return locale.sort_key(find_fullname(handle, - self.person_map.get(handle))) - - def __sortbyfamily_key(self, handle, locale=glocale): - handle = handle.encode('utf-8') - data = self.family_map.get(handle) - data2 = data[2] - data3 = data[3] - if data2: # father handle - data2 = data2.encode('utf-8') - return locale.sort_key(find_fullname(data2, - self.person_map.get(data2))) - elif data3: # mother handle - data3 = data3.encode('utf-8') - return locale.sort_key(find_fullname(data3, - self.person_map.get(data3))) - return '' - - def __sortbyplace(self, first, second): - first = first.encode('utf-8') - second = second.encode('utf-8') - return glocale.strcoll(self.place_map.get(first)[2], - self.place_map.get(second)[2]) - - def __sortbyplace_key(self, place, locale=glocale): - place = place.encode('utf-8') - return locale.sort_key(self.place_map.get(place)[2]) - - def __sortbysource(self, first, second): - first = first.encode('utf-8') - second = second.encode('utf-8') - source1 = str(self.source_map[first][2]) - source2 = str(self.source_map[second][2]) - return glocale.strcoll(source1, source2) - - def __sortbysource_key(self, key, locale=glocale): - key = key.encode('utf-8') - source = str(self.source_map[key][2]) - return locale.sort_key(source) - - def __sortbycitation(self, first, second): - first = first.encode('utf-8') - second = second.encode('utf-8') - citation1 = str(self.citation_map[first][3]) - citation2 = str(self.citation_map[second][3]) - return glocale.strcoll(citation1, citation2) - - def __sortbycitation_key(self, key, locale=glocale): - key = key.encode('utf-8') - citation = str(self.citation_map[key][3]) - return locale.sort_key(citation) - - def __sortbymedia(self, first, second): - first = first.encode('utf-8') - second = second.encode('utf-8') - media1 = self.media_map[first][4] - media2 
= self.media_map[second][4] - return glocale.strcoll(media1, media2) - - def __sortbymedia_key(self, key, locale=glocale): - key = key.encode('utf-8') - media = self.media_map[key][4] - return locale.sort_key(media) - - def __sortbytag(self, first, second): - first = first.encode('utf-8') - second = second.encode('utf-8') - tag1 = self.tag_map[first][1] - tag2 = self.tag_map[second][1] - return glocale.strcoll(tag1, tag2) - - def __sortbytag_key(self, key, locale=glocale): - key = key.encode('utf-8') - tag = self.tag_map[key][1] - return locale.sort_key(tag) - - def set_mediapath(self, path): - """Set the default media path for database.""" - if (self.metadata is not None) and (not self.readonly): - self.metadata[b'mediapath'] = path - - def get_mediapath(self): - """Return the default media path of the database.""" - if self.metadata is not None: - return self.metadata.get(b'mediapath', None) - return None - - def find_backlink_handles(self, handle, include_classes=None): - """ - Find all objects that hold a reference to the object handle. - - Returns an interator over alist of (class_name, handle) tuples. - - :param handle: handle of the object to search for. - :type handle: database handle - :param include_classes: list of class names to include in the results. - Defaults to None, which includes all classes. - :type include_classes: list of class names - - This default implementation does a sequencial scan through all - the primary object databases and is very slow. Backends can - override this method to provide much faster implementations that - make use of additional capabilities of the backend. - - Note that this is a generator function, it returns a iterator for - use in loops. If you want a list of the results use:: - - result_list = list(find_backlink_handles(handle)) - """ - assert False, "read:find_backlink_handles -- shouldn't get here!!!" - # Make a dictionary of the functions and classes that we need for - # each of the primary object tables. 
- primary_tables = { - 'Person': { - 'cursor_func': self.get_person_cursor, - 'class_func': Person, - }, - 'Family': { - 'cursor_func': self.get_family_cursor, - 'class_func': Family, - }, - 'Event': { - 'cursor_func': self.get_event_cursor, - 'class_func': Event, - }, - 'Place': { - 'cursor_func': self.get_place_cursor, - 'class_func': Place, - }, - 'Source': { - 'cursor_func': self.get_source_cursor, - 'class_func': Source, - }, - 'Citation': { - 'cursor_func': self.get_citation_cursor, - 'class_func': Citation, - }, - 'Media': { - 'cursor_func': self.get_media_cursor, - 'class_func': Media, - }, - 'Repository': { - 'cursor_func': self.get_repository_cursor, - 'class_func': Repository, - }, - 'Note': { - 'cursor_func': self.get_note_cursor, - 'class_func': Note, - }, - 'Tag': { - 'cursor_func': self.get_tag_cursor, - 'class_func': Tag, - }, - } - - # Find which tables to iterate over - if (include_classes is None): - the_tables = list(primary_tables.keys()) - else: - the_tables = include_classes - - # Now we use the functions and classes defined above to loop through - # each of the existing primary object tables - for primary_table_name, funcs in the_tables.items(): - with funcs['cursor_func']() as cursor: - - # Grab the real object class here so that the lookup does - # not happen inside the main loop. - class_func = funcs['class_func'] - for found_handle, val in cursor: - obj = class_func() - obj.unserialize(val) - - # Now we need to loop over all object types - # that have been requests in the include_classes list - for classname in primary_tables: - if obj.has_handle_reference(classname, handle): - yield (primary_table_name, found_handle) - return - - def report_bm_change(self): - """ - Add 1 to the number of bookmark changes during this session. - """ - self._bm_changes += 1 - - def db_has_bm_changes(self): - """ - Return whethere there were bookmark changes during the session. 
- """ - return self._bm_changes > 0 - - def get_dbid(self): - """ - In BSDDB, we use the file directory name as the unique ID for - this database on this computer. - """ - return None - - def get_dbname(self): - """ - In BSDDB, the database is in a text file at the path - """ - filepath = os.path.join(self.path, "name.txt") - try: - with open(filepath, "r", encoding='utf-8') as name_file: - name = name_file.readline().strip() - except (OSError, IOError) as msg: - self.__log_error() - name = None - return name - - def get_summary(self): - """ - Returns dictionary of summary item. - Should include, if possible: - - _("Number of people") - _("Version") - _("Schema version") - """ - return { - _("Number of people"): self.get_number_of_people(), - _("Number of families"): self.get_number_of_families(), - _("Number of sources"): self.get_number_of_sources(), - _("Number of citations"): self.get_number_of_citations(), - _("Number of events"): self.get_number_of_events(), - _("Number of media"): self.get_number_of_media(), - _("Number of places"): self.get_number_of_places(), - _("Number of repositories"): self.get_number_of_repositories(), - _("Number of notes"): self.get_number_of_notes(), - _("Number of tags"): self.get_number_of_tags(), - } diff --git a/gramps/plugins/db/bsddb/summary.py b/gramps/plugins/db/bsddb/summary.py deleted file mode 100644 index e73908128..000000000 --- a/gramps/plugins/db/bsddb/summary.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2016 Douglas S. Blank -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -## Removed from clidbman.py -## specific to bsddb - -import os -from bsddb3 import dbshelve, db - -from gramps.gen.db import META, PERSON_TBL -from gramps.gen.db.dbconst import BDBVERSFN - -import logging -LOG = logging.getLogger(".dbsummary") - -def get_dbdir_summary(dirpath, name): - """ - Returns (people_count, bsddb_version, schema_version) of - current DB. - Returns ("Unknown", "Unknown", "Unknown") if invalid DB or other error. - """ - - bdbversion_file = os.path.join(dirpath, BDBVERSFN) - if os.path.isfile(bdbversion_file): - with open(bdbversion_file) as vers_file: - bsddb_version = vers_file.readline().strip() - else: - return "Unknown", "Unknown", "Unknown" - - current_bsddb_version = str(db.version()) - if bsddb_version != current_bsddb_version: - return "Unknown", bsddb_version, "Unknown" - - env = db.DBEnv() - flags = db.DB_CREATE | db.DB_PRIVATE |\ - db.DB_INIT_MPOOL |\ - db.DB_INIT_LOG | db.DB_INIT_TXN - try: - env.open(dirpath, flags) - except Exception as msg: - LOG.warning("Error opening db environment for '%s': %s" % - (name, str(msg))) - try: - env.close() - except Exception as msg: - LOG.warning("Error closing db environment for '%s': %s" % - (name, str(msg))) - return "Unknown", bsddb_version, "Unknown" - dbmap1 = dbshelve.DBShelf(env) - fname = os.path.join(dirpath, META + ".db") - try: - dbmap1.open(fname, META, db.DB_HASH, db.DB_RDONLY) - except: - env.close() - return "Unknown", bsddb_version, "Unknown" - schema_version = dbmap1.get(b'version', default=None) - dbmap1.close() - 
dbmap2 = dbshelve.DBShelf(env) - fname = os.path.join(dirpath, PERSON_TBL + ".db") - try: - dbmap2.open(fname, PERSON_TBL, db.DB_HASH, db.DB_RDONLY) - except: - env.close() - return "Unknown", bsddb_version, schema_version - count = len(dbmap2) - dbmap2.close() - env.close() - return (count, bsddb_version, schema_version) diff --git a/gramps/plugins/db/bsddb/test/__init__.py b/gramps/plugins/db/bsddb/test/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/gramps/plugins/db/bsddb/test/cursor_test.py b/gramps/plugins/db/bsddb/test/cursor_test.py deleted file mode 100644 index e90e16fd4..000000000 --- a/gramps/plugins/db/bsddb/test/cursor_test.py +++ /dev/null @@ -1,228 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2000-2007 Donald N. Allingham -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
-# - -import unittest -import os -import tempfile -import shutil - -from bsddb3 import dbshelve, db - -from ..read import DbBsddbTreeCursor - -class Data: - - def __init__(self, handle,surname, name): - self.handle = handle - self.surname = surname - self.name = name - -## def __repr__(self): -## return repr((self.handle,self.surname,self.name)) - -class CursorTest(unittest.TestCase): - """Test the cursor handling.""" - - def setUp(self): - self._tmpdir = tempfile.mkdtemp() - self.full_name = os.path.join(self._tmpdir,'test.grdb') - self.env = db.DBEnv() - self.env.set_cachesize(0,0x2000000) - self.env.set_lk_max_locks(25000) - self.env.set_lk_max_objects(25000) - - # clean up unused logs - autoremove_flag = None - autoremove_method = None - for flag in ["DB_LOG_AUTO_REMOVE", "DB_LOG_AUTOREMOVE"]: - if hasattr(db, flag): - autoremove_flag = getattr(db, flag) - break - for method in ["log_set_config", "set_flags"]: - if hasattr(self.env, method): - autoremove_method = getattr(self.env, method) - break - if autoremove_method and autoremove_flag: - autoremove_method(autoremove_flag, 1) - - # The DB_PRIVATE flag must go if we ever move to multi-user setup - env_flags = db.DB_CREATE|db.DB_RECOVER|db.DB_PRIVATE|\ - db.DB_INIT_MPOOL|db.DB_INIT_LOCK|\ - db.DB_INIT_LOG|db.DB_INIT_TXN - - env_name = "%s/env" % (self._tmpdir,) - if not os.path.isdir(env_name): - os.mkdir(env_name) - self.env.open(env_name,env_flags) - (self.person_map,self.surnames) = self._open_tables() - (self.place_map, self.placerefs) = self._open_treetables() - - def _open_tables(self): - dbmap = dbshelve.DBShelf(self.env) - dbmap.db.set_pagesize(16384) - dbmap.open(self.full_name, 'person', db.DB_HASH, - db.DB_CREATE|db.DB_AUTO_COMMIT, 0o666) - person_map = dbmap - - table_flags = db.DB_CREATE|db.DB_AUTO_COMMIT - - surnames = db.DB(self.env) - surnames.set_flags(db.DB_DUP|db.DB_DUPSORT) - surnames.open(self.full_name, "surnames", db.DB_BTREE, - flags=table_flags) - - def find_surname(key,data): - val = 
data.surname - if isinstance(val, str): - val = val.encode('utf-8') - return val - - person_map.associate(surnames, find_surname, table_flags) - - return (person_map,surnames) - - def _open_treetables(self): - dbmap = dbshelve.DBShelf(self.env) - dbmap.db.set_pagesize(16384) - dbmap.open(self.full_name, 'places', db.DB_HASH, - db.DB_CREATE|db.DB_AUTO_COMMIT, 0o666) - place_map = dbmap - - table_flags = db.DB_CREATE|db.DB_AUTO_COMMIT - - placerefs = db.DB(self.env) - placerefs.set_flags(db.DB_DUP|db.DB_DUPSORT) - placerefs.open(self.full_name, "placerefs", db.DB_BTREE, - flags=table_flags) - - def find_placeref(key,data): - val = data[2] - if isinstance(val, str): - val = val.encode('utf-8') - return val - - place_map.associate(placerefs, find_placeref, table_flags) - - return (place_map, placerefs) - - def tearDown(self): - self.person_map.close() - self.surnames.close() - self.place_map.close() - self.placerefs.close() - self.env.close() - shutil.rmtree(self._tmpdir) - - def test_simple_insert(self): - """test insert and retrieve works.""" - - data = Data(b'1' ,'surname1', 'name1') - the_txn = self.env.txn_begin() - self.person_map.put(data.handle, data, txn=the_txn) - the_txn.commit() - - v = self.person_map.get(data.handle) - - self.assertEqual(v.handle, data.handle) - - def test_insert_with_cursor_closed(self): - """test_insert_with_cursor_closed""" - - cursor_txn = self.env.txn_begin() - - cursor = self.surnames.cursor(txn=cursor_txn) - cursor.first() - cursor.next() - cursor.close() - cursor_txn.commit() - - data = Data(b'2', 'surname2', 'name2') - the_txn = self.env.txn_begin() - self.person_map.put(data.handle, data, txn=the_txn) - the_txn.commit() - - v = self.person_map.get(data.handle) - - self.assertEqual(v.handle, data.handle) - - def test_insert_with_cursor_open(self): - """test_insert_with_cursor_open""" - - cursor_txn = self.env.txn_begin() - cursor = self.surnames.cursor(txn=cursor_txn) - cursor.first() - cursor.next() - - data = Data(b'2', 
'surname2', 'name2') - self.person_map.put(data.handle, data, txn=cursor_txn) - - cursor.close() - cursor_txn.commit() - - v = self.person_map.get(data.handle) - - self.assertEqual(v.handle, data.handle) - - def test_insert_with_cursor_open_and_db_open(self): - """test_insert_with_cursor_open_and_db_open""" - - (person2,surnames2) = self._open_tables() - - cursor_txn = self.env.txn_begin() - cursor = surnames2.cursor(txn=cursor_txn) - cursor.first() - cursor.next() - - data = Data(b'2', 'surname2', 'name2') - self.person_map.put(data.handle, data, txn=cursor_txn) - - cursor.close() - cursor_txn.commit() - - v = self.person_map.get(data.handle) - - self.assertEqual(v.handle, data.handle) - - def test_treecursor(self): - #fill with data - the_txn = self.env.txn_begin() - data = [('1', 'countryA', '' ), - ('2', 'localityA', '1' ), - ('3', 'localityB', '1' ), - ('4', 'countryB', '' ), - ('5', 'streetA', '2' ), - ('6', 'countryB', '' )] - for d in data: - self.place_map.put(d[0].encode('utf-8'), d, txn=the_txn) - the_txn.commit() - - cursor_txn = self.env.txn_begin() - cursor = DbBsddbTreeCursor(self.placerefs, self.place_map, False, - cursor_txn) - placenames = set([d[1] for handle, d in cursor]) - - cursor.close() - cursor_txn.commit() - pldata = set([d[1] for d in data]) - self.assertEqual(placenames, pldata) - - -if __name__ == '__main__': - unittest.main() diff --git a/gramps/plugins/db/bsddb/test/db_test.py b/gramps/plugins/db/bsddb/test/db_test.py deleted file mode 100644 index c6f3139f2..000000000 --- a/gramps/plugins/db/bsddb/test/db_test.py +++ /dev/null @@ -1,257 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2000-2007 Donald N. Allingham -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -import unittest - -from .. import DbReadBase, DbWriteBase, DbBsddbRead, DbBsddb -from gramps.gen.proxy.proxybase import ProxyDbBase -from gramps.gen.proxy import LivingProxyDb - -class DbTest(unittest.TestCase): - READ_METHODS = [ - "close", - "db_has_bm_changes", - "find_backlink_handles", - "find_initial_person", - "find_next_event_gramps_id", - "find_next_family_gramps_id", - "find_next_note_gramps_id", - "find_next_media_gramps_id", - "find_next_person_gramps_id", - "find_next_place_gramps_id", - "find_next_repository_gramps_id", - "find_next_source_gramps_id", - "get_bookmarks", - "get_child_reference_types", - "get_dbid", - "get_dbname", - "get_default_handle", - "get_default_person", - "get_event_attribute_types", - "get_event_bookmarks", - "get_event_cursor", - "get_event_from_gramps_id", - "get_event_from_handle", - "get_event_handles", - "get_event_roles", - "get_event_types", - "get_family_attribute_types", - "get_family_bookmarks", - "get_family_cursor", - "get_family_from_gramps_id", - "get_family_from_handle", - "get_family_handles", - "get_family_relation_types", - "get_media_attribute_types", - "get_media_bookmarks", - "get_media_cursor", - "get_media_handles", - "get_mediapath", - "get_name_group_keys", - "get_name_group_mapping", - "get_name_types", - "get_note_bookmarks", - "get_note_cursor", - "get_note_from_gramps_id", - "get_note_from_handle", - "get_note_handles", - "get_note_types", - "get_number_of_events", - "get_number_of_families", - "get_number_of_media", - 
"get_number_of_notes", - "get_number_of_people", - "get_number_of_places", - "get_number_of_repositories", - "get_number_of_sources", - "get_number_of_citations", - "get_number_of_tags", - "get_media_from_gramps_id", - "get_media_from_handle", - "get_person_attribute_types", - "get_person_cursor", - "get_person_from_gramps_id", - "get_person_from_handle", - "get_person_handles", - "get_place_bookmarks", - "get_place_cursor", - "get_place_from_gramps_id", - "get_place_from_handle", - "get_place_handles", - "get_raw_event_data", - "get_raw_family_data", - "get_raw_note_data", - "get_raw_media_data", - "get_raw_person_data", - "get_raw_place_data", - "get_raw_repository_data", - "get_raw_source_data", - "get_raw_tag_data", - "get_repo_bookmarks", - "get_repository_cursor", - "get_repository_from_gramps_id", - "get_repository_from_handle", - "get_repository_handles", - "get_repository_types", - "get_researcher", - "get_save_path", - "get_source_bookmarks", - "get_source_cursor", - "get_source_from_gramps_id", - "get_source_from_handle", - "get_source_handles", - "get_source_media_types", - "get_tag_cursor", - "get_tag_from_name", - "get_tag_from_handle", - "get_tag_handles", - "get_surname_list", - "get_url_types", - "has_event_handle", - "has_family_handle", - "has_name_group_key", - "has_note_handle", - "has_media_handle", - "has_person_handle", - "has_place_handle", - "has_repository_handle", - "has_source_handle", - "has_tag_handle", - "is_open", - "iter_event_handles", - "iter_events", - "iter_families", - "iter_family_handles", - "iter_media_handles", - "iter_media", - "iter_note_handles", - "iter_notes", - "iter_people", - "iter_person_handles", - "iter_place_handles", - "iter_places", - "iter_repositories", - "iter_repository_handles", - "iter_source_handles", - "iter_sources", - "iter_tag_handles", - "iter_tags", - "load", - "report_bm_change", - "request_rebuild", - # Prefix: - "set_event_id_prefix", - "set_family_id_prefix", - "set_note_id_prefix", - 
"set_media_id_prefix", - "set_person_id_prefix", - "set_place_id_prefix", - "set_prefixes", - "set_repository_id_prefix", - "set_source_id_prefix", - # Other set methods: - "set_mediapath", - "set_researcher", - "version_supported", - ] - - WRITE_METHODS = [ - "add_event", - "add_family", - "add_note", - "add_media", - "add_person", - "add_place", - "add_repository", - "add_source", - "add_tag", - "add_to_surname_list", - "commit_event", - "commit_family", - "commit_media", - "commit_note", - "commit_person", - "commit_place", - "commit_repository", - "commit_source", - "commit_tag", - "rebuild_secondary", - "reindex_reference_map", - "remove_event", - "remove_family", - "remove_from_surname_list", - "remove_note", - "remove_media", - "remove_person", - "remove_place", - "remove_repository", - "remove_source", - "remove_tag", - "set_default_person_handle", - "set_name_group_mapping", - "transaction_begin", - "transaction_commit", - ] - - def _verify_readonly(self, db): - for method in self.READ_METHODS: - self.assertTrue(hasattr(db, method), - ("readonly should have a '%s' method" % method)) - for method in self.WRITE_METHODS: - self.assertFalse(hasattr(db, method), - ("readonly should NOT have a '%s' method" % method)) - - def _verify_readwrite(self, db): - for method in self.READ_METHODS: - self.assertTrue(hasattr(db, method), - ("readwrite should have a '%s' method" % method)) - for method in self.WRITE_METHODS: - self.assertTrue(hasattr(db, method), - ("readwrite should have a '%s' method" % method)) - - def test_verify_readbase(self): - db = DbReadBase() - self._verify_readonly(db) - - def test_verify_writebase(self): - db = DbWriteBase() - self._verify_readwrite(db) - - def test_verify_read(self): - db = DbBsddbRead() - self._verify_readonly(db) - - def test_verify_write(self): - db = DbBsddb() - self._verify_readwrite(db) - - def test_verify_proxy(self): - gdb = DbBsddb() - db = ProxyDbBase(gdb) - self._verify_readonly(db) - - def test_verify_living(self): - 
gdb = DbBsddb() - db = LivingProxyDb(gdb, LivingProxyDb.MODE_EXCLUDE_ALL) - self._verify_readonly(db) - - -if __name__ == "__main__": - unittest.main() diff --git a/gramps/plugins/db/bsddb/test/grampsdbtestbase.py b/gramps/plugins/db/bsddb/test/grampsdbtestbase.py deleted file mode 100644 index 44a8b5a29..000000000 --- a/gramps/plugins/db/bsddb/test/grampsdbtestbase.py +++ /dev/null @@ -1,164 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2000-2007 Donald N. Allingham -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -import unittest - -from .. 
import DbBsddb, DbTxn -from gramps.cli.clidbman import CLIDbManager -from gramps.gen.dbstate import DbState -from gramps.gen.db.utils import make_database -from gramps.gen.lib import (Source, RepoRef, Citation, Repository, Person, - Family, Event, Place, Media) - -class GrampsDbBaseTest(unittest.TestCase): - """Base class for unittest that need to be able to create - test databases.""" - - def setUp(self): - def dummy_callback(dummy): - pass - - self.dbstate = DbState() - self.dbman = CLIDbManager(self.dbstate) - dirpath, name = self.dbman.create_new_db_cli("Test: bsddb", dbid="bsddb") - self._db = make_database("bsddb") - self._db.load(dirpath, None) - - def tearDown(self): - self._db.close() - self.dbman.remove_database("Test: bsddb") - - def _populate_database(self, - num_sources = 1, - num_persons = 0, - num_families = 0, - num_events = 0, - num_places = 0, - num_media = 0, - num_links = 1): - # start with sources - sources = [] - for i in range(num_sources): - sources.append(self._add_source()) - - # now for each of the other tables. 
Give each entry a link - # to num_link sources, sources are chosen on a round robin - # basis - - for num, add_func in ((num_persons, self._add_person_with_sources), - (num_families, self._add_family_with_sources), - (num_events, self._add_event_with_sources), - (num_places, self._add_place_with_sources), - (num_media, self._add_media_with_sources)): - - source_idx = 1 - for person_idx in range(num): - - # Get the list of sources to link - lnk_sources = set() - for i in range(num_links): - lnk_sources.add(sources[source_idx-1]) - source_idx = (source_idx+1) % len(sources) - - try: - add_func(lnk_sources) - except: - print ("person_idx = ", person_idx) - print ("lnk_sources = ", repr(lnk_sources)) - raise - - return - - def _add_source(self,repos=None): - # Add a Source - - with DbTxn("Add Source and Citation", self._db) as tran: - source = Source() - if repos is not None: - repo_ref = RepoRef() - repo_ref.set_reference_handle(repos.get_handle()) - source.add_repo_reference(repo_ref) - self._db.add_source(source, tran) - self._db.commit_source(source, tran) - citation = Citation() - citation.set_reference_handle(source.get_handle()) - self._db.add_citation(citation, tran) - self._db.commit_citation(citation, tran) - - return citation - - def _add_repository(self): - # Add a Repository - - with DbTxn("Add Repository", self._db) as tran: - repos = Repository() - self._db.add_repository(repos, tran) - self._db.commit_repository(repos, tran) - - return repos - - - def _add_object_with_source(self, citations, object_class, add_method, - commit_method): - - object = object_class() - - with DbTxn("Add Object", self._db) as tran: - for citation in citations: - object.add_citation(citation.get_handle()) - add_method(object, tran) - commit_method(object, tran) - - return object - - def _add_person_with_sources(self, citations): - - return self._add_object_with_source(citations, - Person, - self._db.add_person, - self._db.commit_person) - - def _add_family_with_sources(self, 
citations): - - return self._add_object_with_source(citations, - Family, - self._db.add_family, - self._db.commit_family) - - def _add_event_with_sources(self, citations): - - return self._add_object_with_source(citations, - Event, - self._db.add_event, - self._db.commit_event) - - def _add_place_with_sources(self, citations): - - return self._add_object_with_source(citations, - Place, - self._db.add_place, - self._db.commit_place) - - def _add_media_with_sources(self, citations): - - return self._add_object_with_source(citations, - Media, - self._db.add_media, - self._db.commit_media) diff --git a/gramps/plugins/db/bsddb/test/reference_map_test.py b/gramps/plugins/db/bsddb/test/reference_map_test.py deleted file mode 100644 index 1b6321ab7..000000000 --- a/gramps/plugins/db/bsddb/test/reference_map_test.py +++ /dev/null @@ -1,219 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2000-2007 Donald N. Allingham -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -import unittest -import logging -import time - -from .. 
import DbTxn -from gramps.gen.lib import Person, Event, Source, Citation -from gramps.gen.errors import HandleError - -logger = logging.getLogger('Gramps.GrampsDbBase_Test') - -from .grampsdbtestbase import GrampsDbBaseTest - -class ReferenceMapTest(GrampsDbBaseTest): - """Test methods on the GrampsDbBase class that are related to the reference_map - index implementation.""" - - def test_simple_lookup(self): - """insert a record and a reference and check that - a lookup for the reference returns the original - record.""" - - citation = self._add_source() - person = self._add_person_with_sources([citation]) - - references = list(self._db.find_backlink_handles(citation.get_handle())) - - self.assertEqual(len(references), 1) - self.assertEqual(references[0], (Person.__name__, person.get_handle())) - - def test_backlink_for_repository(self): - """check that the citation /source / repos backlink lookup works.""" - - repos = self._add_repository() - citation = self._add_source(repos=repos) - - references = list(self._db.find_backlink_handles(repos.get_handle())) - - self.assertEqual(len(references), 1) - self.assertEqual(references[0][0], Source.__name__) - - references = list(self._db.find_backlink_handles(references[0][1])) - - self.assertEqual(len(references), 1) - self.assertEqual(references[0], - (Citation.__name__, citation.get_handle())) - - def test_class_limited_lookup(self): - """check that class limited lookups work.""" - - citation = self._add_source() - person = self._add_person_with_sources([citation]) - - self._add_family_with_sources([citation]) - self._add_event_with_sources([citation]) - self._add_place_with_sources([citation]) - self._add_media_with_sources([citation]) - - # make sure that we have the correct number of references (one for each object) - references = list(self._db.find_backlink_handles(citation.get_handle())) - - self.assertEqual(len(references), 5, - "len(references) == %s " % str(len(references))) - - # should just return the person 
reference - references = [ref for ref in self._db.find_backlink_handles(citation.get_handle(), (Person.__name__,))] - self.assertEqual(len(references), 1, - "len(references) == %s " % str(len(references))) - self.assertEqual(references[0][0], Person.__name__, - "references = %s" % repr(references)) - - # should just return the person and event reference - references = list(self._db.find_backlink_handles(citation.get_handle(), - (Person.__name__, Event.__name__))) - self.assertEqual(len(references), 2, - "len(references) == %s " % str(len(references))) - self.assertEqual(references[0][0], Person.__name__, - "references = %s" % repr(references)) - self.assertEqual(references[1][0], Event.__name__, - "references = %s" % repr(references)) - - def test_delete_primary(self): - """check that deleting a primary will remove the backreferences - from the reference_map""" - - citation = self._add_source() - person = self._add_person_with_sources([citation]) - - self.assertIsNotNone(self._db.get_person_from_handle(person.get_handle())) - - with DbTxn("Del Person", self._db) as tran: - self._db.remove_person(person.get_handle(),tran) - - self.assertRaises(HandleError, self._db.get_person_from_handle, - person.get_handle()) - - references = list(self._db.find_backlink_handles(citation.get_handle())) - - self.assertEqual(len(references), 0, - "len(references) == %s " % str(len(references))) - - def test_reindex_reference_map(self): - """Test that the reindex function works.""" - - def cb(count): - pass - - # unhook the reference_map update function so that we - # can insert some records without the reference_map being updated. - update_method = self._db._update_reference_map - self._db._update_reference_map = lambda x,y,z: 1 - - # Insert a person/source pair. - citation = self._add_source() - person = self._add_person_with_sources([citation]) - - # Check that the reference map does not contain the reference. 
- references = list(self._db.find_backlink_handles(citation.get_handle())) - - self.assertEqual(len(references), 0, - "len(references) == %s " % str(len(references))) - - # Reinstate the reference_map method and reindex the database - self._db._update_reference_map = update_method - self._db.reindex_reference_map(cb) - - # Check that the reference now appears in the reference_map - references = list(self._db.find_backlink_handles(citation.get_handle())) - - self.assertEqual(len(references), 1, - "len(references) == %s " % str(len(references))) - - def perf_simple_search_speed(self): - """ - This doesn't work any more due to multiply inheritance changes. - """ - - num_sources = 100 - num_persons = 1000 - num_families = 10 - num_events = 10 - num_places = 10 - num_media = 10 - num_links = 10 - - self._populate_database(num_sources, - num_persons, - num_families, - num_events, - num_places, - num_media, - num_links) - - - # time searching for source backrefs with and without reference_map - cur = self._db.get_source_cursor() - handle,data = cur.first() - cur.close() - - start = time.time() - references = list(self._db.find_backlink_handles(handle)) - end = time.time() - - with_reference_map = end - start - - remember = self._db.__class__.find_backlink_handles - - self._db.__class__.find_backlink_handles = self._db.__class__.__base__.find_backlink_handles - - start = time.time() - references = list(self._db.find_backlink_handles(handle)) - end = time.time() - - without_reference_map = end - start - - self._db.__class__.find_backlink_handles = remember - - logger.info("search test with following data: \n" - "num_sources = %d \n" - "num_persons = %d \n" - "num_families = %d \n" - "num_events = %d \n" - "num_places = %d \n" - "num_media = %d \n" - "num_links = %d" % (num_sources, - num_persons, - num_families, - num_events, - num_places, - num_media, - num_links)) - logger.info("with refs %s\n", str(with_reference_map)) - logger.info("without refs %s\n", 
str(without_reference_map)) - - self.assertLess(with_reference_map, without_reference_map / 10, - "Reference_map should an order of magnitude faster.") - - -if __name__ == '__main__': - unittest.main() diff --git a/gramps/plugins/db/bsddb/undoredo.py b/gramps/plugins/db/bsddb/undoredo.py deleted file mode 100644 index 53ab39a60..000000000 --- a/gramps/plugins/db/bsddb/undoredo.py +++ /dev/null @@ -1,548 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2004-2006 Donald N. Allingham -# Copyright (C) 2011 Tim G L Lyons -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -""" -Exports the DbUndo class for managing Gramps transactions -undos and redos. 
-""" - -#------------------------------------------------------------------------- -# -# Standard python modules -# -#------------------------------------------------------------------------- -import time, os -import pickle -from collections import deque - -try: - from bsddb3 import db -except: - # FIXME: make this more abstract to deal with other backends - class db: - DBRunRecoveryError = 0 - DBAccessError = 0 - DBPageNotFoundError = 0 - DBInvalidArgError = 0 - -from gramps.gen.const import GRAMPS_LOCALE as glocale -_ = glocale.translation.gettext - -#------------------------------------------------------------------------- -# -# Gramps modules -# -#------------------------------------------------------------------------- -from gramps.gen.db.dbconst import (REFERENCE_KEY, KEY_TO_NAME_MAP, TXNDEL, - TXNADD, TXNUPD) -from . import BSDDBTxn -from gramps.gen.errors import DbError - -#------------------------------------------------------------------------- -# -# Local Constants -# -#------------------------------------------------------------------------- -DBERRS = (db.DBRunRecoveryError, db.DBAccessError, - db.DBPageNotFoundError, db.DBInvalidArgError) - -_SIGBASE = ('person', 'family', 'source', 'event', 'media', - 'place', 'repository', 'reference', 'note', 'tag', 'citation') - -#------------------------------------------------------------------------- -# -# DbUndo class -# -#------------------------------------------------------------------------- -class DbUndo: - """ - Base class for the Gramps undo/redo manager. Needs to be subclassed - for use with a real backend. - """ - - __slots__ = ('undodb', 'db', 'mapbase', 'undo_history_timestamp', - 'txn', 'undoq', 'redoq') - - def __init__(self, grampsdb): - """ - Class constructor. Set up main instance variables - """ - self.db = grampsdb - self.undoq = deque() - self.redoq = deque() - self.undo_history_timestamp = time.time() - self.txn = None - # N.B. 
the databases have to be in the same order as the numbers in - # xxx_KEY in gen/db/dbconst.py - self.mapbase = ( - self.db.person_map, - self.db.family_map, - self.db.source_map, - self.db.event_map, - self.db.media_map, - self.db.place_map, - self.db.repository_map, - self.db.reference_map, - self.db.note_map, - self.db.tag_map, - self.db.citation_map, - ) - - def clear(self): - """ - Clear the undo/redo list (but not the backing storage) - """ - self.undoq.clear() - self.redoq.clear() - self.undo_history_timestamp = time.time() - self.txn = None - - def __enter__(self, value): - """ - Context manager method to establish the context - """ - self.open(value) - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """ - Context manager method to finish the context - """ - if exc_type is None: - self.close() - return exc_type is None - - def open(self, value): - """ - Open the backing storage. Needs to be overridden in the derived - class. - """ - raise NotImplementedError - - def close(self): - """ - Close the backing storage. Needs to be overridden in the derived - class. - """ - raise NotImplementedError - - def append(self, value): - """ - Add a new entry on the end. Needs to be overridden in the derived - class. - """ - raise NotImplementedError - - def __getitem__(self, index): - """ - Returns an entry by index number. Needs to be overridden in the - derived class. - """ - raise NotImplementedError - - def __setitem__(self, index, value): - """ - Set an entry to a value. Needs to be overridden in the derived class. - """ - raise NotImplementedError - - def __len__(self): - """ - Returns the number of entries. Needs to be overridden in the derived - class. - """ - raise NotImplementedError - - def commit(self, txn, msg): - """ - Commit the transaction to the undo/redo database. 
"txn" should be - an instance of Gramps transaction class - """ - txn.set_description(msg) - txn.timestamp = time.time() - self.undoq.append(txn) - - def undo(self, update_history=True): - """ - Undo a previously committed transaction - """ - if self.db.readonly or self.undo_count == 0: - return False - return self.__undo(update_history) - - def redo(self, update_history=True): - """ - Redo a previously committed, then undone, transaction - """ - if self.db.readonly or self.redo_count == 0: - return False - return self.__redo(update_history) - - def undoredo(func): - """ - Decorator function to wrap undo and redo operations within a bsddb - transaction. It also catches bsddb errors and raises an exception - as appropriate - """ - def try_(self, *args, **kwargs): - try: - with BSDDBTxn(self.db.env) as txn: - self.txn = self.db.txn = txn.txn - status = func(self, *args, **kwargs) - if not status: - txn.abort() - self.db.txn = None - return status - - except DBERRS as msg: - self.db._log_error() - raise DbError(msg) - - return try_ - - @undoredo - def __undo(self, update_history=True): - """ - Access the last committed transaction, and revert the data to the - state before the transaction was committed. 
- """ - txn = self.undoq.pop() - self.redoq.append(txn) - transaction = txn - subitems = transaction.get_recnos(reverse=True) - # sigs[obj_type][trans_type] - sigs = [[[] for trans_type in range(3)] for key in range(11)] - - # Process all records in the transaction - for record_id in subitems: - (key, trans_type, handle, old_data, new_data) = \ - pickle.loads(self.undodb[record_id]) - - if key == REFERENCE_KEY: - self.undo_reference(old_data, handle, self.mapbase[key]) - else: - self.undo_data(old_data, handle, self.mapbase[key]) - handle = handle.decode('utf-8') - sigs[key][trans_type].append(handle) - # now emit the signals - self.undo_sigs(sigs, True) - - # Notify listeners - if self.db.undo_callback: - if self.undo_count > 0: - self.db.undo_callback(_("_Undo %s") - % self.undoq[-1].get_description()) - else: - self.db.undo_callback(None) - - if self.db.redo_callback: - self.db.redo_callback(_("_Redo %s") - % transaction.get_description()) - - if update_history and self.db.undo_history_callback: - self.db.undo_history_callback() - return True - - @undoredo - def __redo(self, db=None, update_history=True): - """ - Access the last undone transaction, and revert the data to the state - before the transaction was undone. 
- """ - txn = self.redoq.pop() - self.undoq.append(txn) - transaction = txn - subitems = transaction.get_recnos() - # sigs[obj_type][trans_type] - sigs = [[[] for trans_type in range(3)] for key in range(11)] - - # Process all records in the transaction - for record_id in subitems: - (key, trans_type, handle, old_data, new_data) = \ - pickle.loads(self.undodb[record_id]) - - if key == REFERENCE_KEY: - self.undo_reference(new_data, handle, self.mapbase[key]) - else: - self.undo_data(new_data, handle, self.mapbase[key]) - handle = handle.decode('utf-8') - sigs[key][trans_type].append(handle) - # now emit the signals - self.undo_sigs(sigs, False) - - # Notify listeners - if self.db.undo_callback: - self.db.undo_callback(_("_Undo %s") - % transaction.get_description()) - - if self.db.redo_callback: - if self.redo_count > 1: - new_transaction = self.redoq[-2] - self.db.redo_callback(_("_Redo %s") - % new_transaction.get_description()) - else: - self.db.redo_callback(None) - - if update_history and self.db.undo_history_callback: - self.db.undo_history_callback() - return True - - def undo_reference(self, data, handle, db_map): - """ - Helper method to undo a reference map entry - """ - try: - if data is None: - db_map.delete(handle, txn=self.txn) - else: - db_map.put(handle, data, txn=self.txn) - - except DBERRS as msg: - self.db._log_error() - raise DbError(msg) - - def undo_data(self, data, handle, db_map): - """ - Helper method to undo/redo the changes made - """ - try: - if data is None: - db_map.delete(handle, txn=self.txn) - else: - db_map.put(handle, data, txn=self.txn) - - except DBERRS as msg: - self.db._log_error() - raise DbError(msg) - - def undo_sigs(self, sigs, undo): - """ - Helper method to undo/redo the signals for changes made - We want to do deletes and adds first - Note that if 'undo' we swap emits - """ - for trans_type in [TXNDEL, TXNADD, TXNUPD]: - for obj_type in range(11): - handles = sigs[obj_type][trans_type] - if handles: - if not undo and 
trans_type == TXNDEL \ - or undo and trans_type == TXNADD: - typ = '-delete' - else: - # don't update a handle if its been deleted, and note - # that 'deleted' handles are in the 'add' list if we - # are undoing - handles = [handle for handle in handles - if handle not in - sigs[obj_type][TXNADD if undo else TXNDEL]] - if ((not undo) and trans_type == TXNADD) \ - or (undo and trans_type == TXNDEL): - typ = '-add' - else: # TXNUPD - typ = '-update' - if handles: - self.db.emit(KEY_TO_NAME_MAP[obj_type] + typ, - (handles,)) - - undo_count = property(lambda self:len(self.undoq)) - redo_count = property(lambda self:len(self.redoq)) - -class DbUndoList(DbUndo): - """ - Implementation of the Gramps undo database using a Python list - """ - def __init__(self, grampsdb): - """ - Class constructor - """ - super(DbUndoList, self).__init__(grampsdb) - self.undodb = [] - - def open(self): - """ - A list does not need to be opened - """ - pass - - def close(self): - """ - Close the list by resetting it to empty - """ - self.undodb = [] - self.clear() - - def append(self, value): - """ - Add an entry on the end of the list - """ - self.undodb.append(value) - return len(self.undodb)-1 - - def __getitem__(self, index): - """ - Return an item at the specified index - """ - return self.undodb[index] - - def __setitem__(self, index, value): - """ - Set an item at the speficied index to the given value - """ - self.undodb[index] = value - - def __iter__(self): - """ - Iterator - """ - for item in self.undodb: - yield item - - def __len__(self): - """ - Return number of entries in the list - """ - return len(self.undodb) - -class DbUndoBSDDB(DbUndo): - """ - Class constructor for Gramps undo/redo database using a bsddb recno - database as the backing store. 
- """ - - def __init__(self, grampsdb, path): - """ - Class constructor - """ - super(DbUndoBSDDB, self).__init__(grampsdb) - self.undodb = db.DB() - self.path = path - - def open(self): - """ - Open the undo/redo database - """ - path = self.path - self.undodb.open(path, db.DB_RECNO, db.DB_CREATE) - - def close(self): - """ - Close the undo/redo database - """ - self.undodb.close() - self.undodb = None - self.mapbase = None - self.db = None - - try: - os.remove(self.path) - except OSError: - pass - self.clear() - - def append(self, value): - """ - Add an entry on the end of the database - """ - return self.undodb.append(value) - - def __len__(self): - """ - Returns the number of entries in the database - """ - x = self.undodb.stat()['nkeys'] - y = len(self.undodb) - assert x == y - return x - - def __getitem__(self, index): - """ - Returns the entry stored at the specified index - """ - return self.undodb.get(index) - - def __setitem__(self, index, value): - """ - Sets the entry stored at the specified index to the value given. 
- """ - self.undodb.put(index, value) - - def __iter__(self): - """ - Iterator - """ - cursor = self.undodb.cursor() - data = cursor.first() - while data: - yield data - data = next(cursor) - -def testundo(): - class T: - def __init__(self): - self.msg = '' - self.timetstamp = 0 - def set_description(self, msg): - self.msg = msg - - class D: - def __init__(self): - self.person_map = {} - self.family_map = {} - self.source_map = {} - self.event_map = {} - self.media_map = {} - self.place_map = {} - self.note_map = {} - self.tag_map = {} - self.repository_map = {} - self.reference_map = {} - - print("list tests") - undo = DbUndoList(D()) - print(undo.append('foo')) - print(undo.append('bar')) - print(undo[0]) - undo[0] = 'foobar' - print(undo[0]) - print("len", len(undo)) - print("iter") - for data in undo: - print(data) - print() - print("bsddb tests") - undo = DbUndoBSDDB(D(), '/tmp/testundo') - undo.open() - print(undo.append('foo')) - print(undo.append('fo2')) - print(undo.append('fo3')) - print(undo[1]) - undo[1] = 'bar' - print(undo[1]) - for data in undo: - print(data) - print("len", len(undo)) - - print("test commit") - undo.commit(T(), msg="test commit") - undo.close() - -if __name__ == '__main__': - testundo() diff --git a/gramps/plugins/db/bsddb/write.py b/gramps/plugins/db/bsddb/write.py deleted file mode 100644 index f1743fd21..000000000 --- a/gramps/plugins/db/bsddb/write.py +++ /dev/null @@ -1,2320 +0,0 @@ -# -# Gramps - a GTK+/GNOME based genealogy program -# -# Copyright (C) 2000-2008 Donald N. Allingham -# Copyright (C) 2010 Nick Hall -# Copyright (C) 2011 Tim G L Lyons -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -""" -Provide the Berkeley DB (DbBsddb) database backend for Gramps. -This is used since Gramps version 3.0 -""" - -#------------------------------------------------------------------------- -# -# Standard python modules -# -#------------------------------------------------------------------------- -import sys -import pickle -import os -import time -import bisect -from functools import wraps -import logging -from sys import maxsize, getfilesystemencoding, version_info -from ast import literal_eval as safe_eval - -from bsddb3 import dbshelve, db -from bsddb3.db import DB_CREATE, DB_AUTO_COMMIT, DB_DUP, DB_DUPSORT, DB_RDONLY - -DBFLAGS_O = DB_CREATE | DB_AUTO_COMMIT # Default flags for database open -DBFLAGS_R = DB_RDONLY # Flags to open a database read-only -DBFLAGS_D = DB_DUP | DB_DUPSORT # Default flags for duplicate keys - -#------------------------------------------------------------------------- -# -# Gramps modules -# -#------------------------------------------------------------------------- -from gramps.gen.lib.person import Person -from gramps.gen.lib.family import Family -from gramps.gen.lib.src import Source -from gramps.gen.lib.citation import Citation -from gramps.gen.lib.event import Event -from gramps.gen.lib.place import Place -from gramps.gen.lib.repo import Repository -from gramps.gen.lib.media import Media -from gramps.gen.lib.note import Note -from gramps.gen.lib.tag import Tag -from gramps.gen.lib.genderstats import GenderStats -from gramps.gen.lib.researcher import Researcher - -from 
. import (DbBsddbRead, DbWriteBase, BSDDBTxn, - DbTxn, BsddbBaseCursor, BsddbDowngradeError, DbVersionError, - DbEnvironmentError, DbUpgradeRequiredError, find_surname, - find_byte_surname, find_surname_name, DbUndoBSDDB as DbUndo) - -from gramps.gen.db import exceptions -from gramps.gen.db.dbconst import * -from gramps.gen.db.utils import write_lock_file, clear_lock_file -from gramps.gen.utils.callback import Callback -from gramps.gen.utils.id import create_id -from gramps.gen.updatecallback import UpdateCallback -from gramps.gen.errors import DbError, HandleError -from gramps.gen.const import HOME_DIR, GRAMPS_LOCALE as glocale -_ = glocale.translation.gettext - -_LOG = logging.getLogger(DBLOGNAME) -LOG = logging.getLogger(".citation") -#_LOG.setLevel(logging.DEBUG) -#_hdlr = logging.StreamHandler() -#_hdlr.setFormatter(logging.Formatter(fmt="%(name)s.%(levelname)s: %(message)s")) -#_LOG.addHandler(_hdlr) -_MINVERSION = 9 -_DBVERSION = 19 - -IDTRANS = "person_id" -FIDTRANS = "family_id" -PIDTRANS = "place_id" -OIDTRANS = "media_id" -EIDTRANS = "event_id" -RIDTRANS = "repo_id" -NIDTRANS = "note_id" -SIDTRANS = "source_id" -CIDTRANS = "citation_id" -TAGTRANS = "tag_name" -SURNAMES = "surnames" -NAME_GROUP = "name_group" -META = "meta_data" -PPARENT = "place_parent" - -FAMILY_TBL = "family" -PLACES_TBL = "place" -SOURCES_TBL = "source" -CITATIONS_TBL = "citation" -MEDIA_TBL = "media" -EVENTS_TBL = "event" -PERSON_TBL = "person" -REPO_TBL = "repo" -NOTE_TBL = "note" -TAG_TBL = "tag" - -REF_MAP = "reference_map" -REF_PRI = "primary_map" -REF_REF = "referenced_map" - -DBERRS = (db.DBRunRecoveryError, db.DBAccessError, - db.DBPageNotFoundError, db.DBInvalidArgError) - -# The following two dictionaries provide fast translation -# between the primary class names and the keys used to reference -# these classes in the database tables. Beware that changing -# these maps or modifying the values of the keys will break -# existing databases. 
- -#------------------------------------------------------------------------- -# -# Helper functions -# -#------------------------------------------------------------------------- - -def find_idmap(key, data): - """ return id for association of secondary index. - returns a byte string - """ - val = data[1] - if val is not None: - val = val.encode('utf-8') - return val - -def find_parent(key, data): - if hasattr(data[5], '__len__') and len(data[5]) > 0: - val = data[5][0][0] - else: - val = '' - return val.encode('utf-8') - -# Secondary database key lookups for reference_map table -# reference_map data values are of the form: -# ((primary_object_class_name, primary_object_handle), -# (referenced_object_class_name, referenced_object_handle)) - -def find_primary_handle(key, data): - """ return handle for association of indexes - returns byte string - """ - val = (data)[0][1] - return val.encode('utf-8') - -def find_referenced_handle(key, data): - """ return handle for association of indexes - returns byte string - """ - val = (data)[1][1] - return val.encode('utf-8') - -#------------------------------------------------------------------------- -# -# BsddbWriteCursor -# -#------------------------------------------------------------------------- -class BsddbWriteCursor(BsddbBaseCursor): - - def __init__(self, source, txn=None, **kwargs): - BsddbBaseCursor.__init__(self, txn=txn, **kwargs) - self.cursor = source.db.cursor(txn) - self.source = source - -#------------------------------------------------------------------------- -# -# DbBsddbAssocCursor -# -#------------------------------------------------------------------------- -class DbBsddbAssocCursor(BsddbBaseCursor): - - def __init__(self, source, txn=None, **kwargs): - BsddbBaseCursor.__init__(self, txn=txn, **kwargs) - self.cursor = source.cursor(txn) - self.source = source - -#------------------------------------------------------------------------- -# -# DbBsddb -# 
-#------------------------------------------------------------------------- -class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback): - """ - Gramps database write access object. - """ - - # Set up dictionary for callback signal handler - # --------------------------------------------- - # 1. Signals for primary objects - __signals__ = dict((obj+'-'+op, signal) - for obj in - ['person', 'family', 'event', 'place', - 'source', 'citation', 'media', 'note', 'repository', 'tag'] - for op, signal in zip( - ['add', 'update', 'delete', 'rebuild'], - [(list,), (list,), (list,), None] - ) - ) - - # 2. Signals for long operations - __signals__.update(('long-op-'+op, signal) for op, signal in zip( - ['start', 'heartbeat', 'end'], - [(object,), None, None] - )) - - # 3. Special signal for change in home person - __signals__['home-person-changed'] = None - - # 4. Signal for change in person group name, parameters are - __signals__['person-groupname-rebuild'] = (str, str) - - def __init__(self): - """Create a new GrampsDB.""" - - self.txn = None - DbBsddbRead.__init__(self) - DbWriteBase.__init__(self) - #UpdateCallback.__init__(self) - self.secondary_connected = False - self.has_changed = False - self.brief_name = None - self.update_env_version = False - self.update_python_version = False - self.update_pickle_version = False - - def catch_db_error(func): - """ - Decorator function for catching database errors. If *func* throws - one of the exceptions in DBERRS, the error is logged and a DbError - exception is raised. 
- """ - @wraps(func) - def try_(self, *args, **kwargs): - try: - return func(self, *args, **kwargs) - except DBERRS as msg: - self.__log_error() - raise DbError(msg) - return try_ - - def __open_db(self, file_name, table_name, dbtype=db.DB_HASH, flags=0): - dbmap = db.DB(self.env) - dbmap.set_flags(flags) - - fname = os.path.join(file_name, table_name + DBEXT) - - if self.readonly: - dbmap.open(fname, table_name, dbtype, DBFLAGS_R) - else: - dbmap.open(fname, table_name, dbtype, DBFLAGS_O, DBMODE) - return dbmap - - def __open_shelf(self, file_name, table_name, dbtype=db.DB_HASH): - dbmap = dbshelve.DBShelf(self.env) - - fname = os.path.join(file_name, table_name + DBEXT) - - if self.readonly: - dbmap.open(fname, table_name, dbtype, DBFLAGS_R) - else: - dbmap.open(fname, table_name, dbtype, DBFLAGS_O, DBMODE) - return dbmap - - def __log_error(self): - mypath = os.path.join(self.get_save_path(),DBRECOVFN) - with open(mypath, "w") as ofile: - pass - try: - clear_lock_file(self.get_save_path()) - except: - pass - - _log_error = __log_error - - # Override get_cursor method from the superclass to add udpate - # capability - - @catch_db_error - def get_cursor(self, table, txn=None, update=False, commit=False): - """ Helper function to return a cursor over a table """ - if update and not txn: - txn = self.env.txn_begin(self.txn) - return BsddbWriteCursor(table, txn=txn or self.txn, - update=update, commit=commit) - - # cursors for lookups in the reference_map for back reference - # lookups. The reference_map has three indexes: - # the main index: a tuple of (primary_handle, referenced_handle) - # the primary_handle index: the primary_handle - # the referenced_handle index: the referenced_handle - # the main index is unique, the others allow duplicate entries. 
- - @catch_db_error - def _get_reference_map_primary_cursor(self): - """ - Returns a reference to a cursor over the reference map primary map - """ - return DbBsddbAssocCursor(self.reference_map_primary_map, - self.txn) - - @catch_db_error - def _get_reference_map_referenced_cursor(self): - """ - Returns a reference to a cursor over the reference map referenced map - """ - return DbBsddbAssocCursor(self.reference_map_referenced_map, - self.txn) - - @catch_db_error - def get_place_parent_cursor(self): - """ - Returns a reference to a cursor over the place parents - """ - return DbBsddbAssocCursor(self.parents, self.txn) - - # These are overriding the DbBsddbRead's methods of saving metadata - # because we now have txn-capable metadata table - - @catch_db_error - def set_default_person_handle(self, handle): - """Set the default Person to the passed instance.""" - if not self.readonly: - # Start transaction - with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'default', handle) - self.emit('home-person-changed') - - @catch_db_error - def get_default_person(self): - """Return the default Person of the database.""" - person_handle = self.get_default_handle() - if person_handle: - person = self.get_person_from_handle(person_handle) - if person: - return person - elif (self.metadata) and (not self.readonly): - # Start transaction - with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'default', None) - return None - else: - return None - - def set_mediapath(self, path): - """Set the default media path for database.""" - if self.metadata and not self.readonly: - # Start transaction - with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'mediapath', path) - - def __make_zip_backup(self, dirname): - import zipfile - # In Windows reserved characters is "<>:"/\|?*" - reserved_char = r':,<>"/\|?* ' - replace_char = "-__________" - title = self.get_dbname() - trans = title.maketrans(reserved_char, replace_char) - title = title.translate(trans) - - if not 
os.access(dirname, os.W_OK): - _LOG.warning("Can't write technical DB backup for %s" % title) - return - (grampsdb_path, db_code) = os.path.split(dirname) - dotgramps_path = os.path.dirname(grampsdb_path) - zipname = title + time.strftime("_%Y-%m-%d_%H-%M-%S") + ".zip" - zippath = os.path.join(dotgramps_path, zipname) - with zipfile.ZipFile(zippath, 'w') as myzip: - for filename in os.listdir(dirname): - pathname = os.path.join(dirname, filename) - myzip.write(pathname, os.path.join(db_code, filename)) - _LOG.warning("If upgrade and loading the Family Tree works, you can " - "delete the zip file at %s" % - zippath) - - def __check_bdb_version(self, name, force_bsddb_upgrade=False, - force_bsddb_downgrade=False): - """Older version of Berkeley DB can't read data created by a newer - version.""" - bdb_version = db.version() - versionpath = os.path.join(self.path, str(BDBVERSFN)) - # Compare the current version of the database (bsddb_version) with the - # version of the database code (env_version). If it is a downgrade, - # raise an exception because we can't do anything. If they are the same, - # return. If it is an upgrade, raise an exception unless the user has - # already told us we can upgrade. 
- if os.path.isfile(versionpath): - with open(versionpath, "r") as version_file: - bsddb_version = version_file.read().strip() - env_version = tuple(map(int, bsddb_version[1:-1].split(', '))) - else: - # bsddb version is unknown - bsddb_version = "Unknown" - env_version = "Unknown" -# _LOG.debug("db version %s, program version %s" % (bsddb_version, bdb_version)) - - if env_version == "Unknown" or \ - (env_version[0] < bdb_version[0]) or \ - (env_version[0] == bdb_version[0] and - env_version[1] < bdb_version[1]) or \ - (env_version[0] == bdb_version[0] and - env_version[1] == bdb_version[1] and - env_version[2] < bdb_version[2]): - # an upgrade is needed - if not force_bsddb_upgrade: - _LOG.debug("Bsddb upgrade required from %s to %s" % - (bsddb_version, str(bdb_version))) - clear_lock_file(name) - raise exceptions.BsddbUpgradeRequiredError(bsddb_version, - str(bdb_version)) - if not self.readonly: - _LOG.warning("Bsddb upgrade requested from %s to %s" % - (bsddb_version, str(bdb_version))) - self.update_env_version = True - # Make a backup of the database files anyway - self.__make_zip_backup(name) - elif (env_version[0] > bdb_version[0]) or \ - (env_version[0] == bdb_version[0] and - env_version[1] > bdb_version[1]): - clear_lock_file(name) - raise BsddbDowngradeError(env_version, bdb_version) - elif (env_version[0] == bdb_version[0] and - env_version[1] == bdb_version[1] and - env_version[2] > bdb_version[2]): - # A down-grade may be possible - if not force_bsddb_downgrade: - _LOG.debug("Bsddb downgrade required from %s to %s" % - (bsddb_version, str(bdb_version))) - clear_lock_file(name) - raise exceptions.BsddbDowngradeRequiredError(bsddb_version, - str(bdb_version)) - # Try to do a down-grade - if not self.readonly: - _LOG.warning("Bsddb downgrade requested from %s to %s" % - (bsddb_version, str(bdb_version))) - self.update_env_version = True - # Make a backup of the database files anyway - self.__make_zip_backup(name) - elif env_version == bdb_version: - # 
Bsddb version is OK - pass - else: - # This can't happen - raise "Comparison between Bsddb version failed" - - def __check_python_version(self, name, force_python_upgrade=False): - """ - The 'pickle' format (may) change with each Python version, see - http://docs.python.org/3.2/library/pickle.html#pickle. Code commits - 21777 and 21778 ensure that when going from python2 to python3, the old - format can be read. However, once the data has been written in the - python3 format, it will not be possible to go back to pyton2. This check - test whether we are changing python versions. If going from 2 to 3 it - warns the user, and allows it if he confirms. When going from 3 to 3, an - error is raised. Because code for python2 did not write the Python - version file, if the file is absent, python2 is assumed. - """ - current_python_version = version_info[0] - versionpath = os.path.join(self.path, "pythonversion.txt") - if os.path.isfile(versionpath): - with open(versionpath, "r") as version_file: - db_python_version = int(version_file.read().strip()) - else: - db_python_version = 2 - - if db_python_version == 3 and current_python_version == 2: - clear_lock_file(name) - raise exceptions.PythonDowngradeError(db_python_version, - current_python_version) - elif db_python_version == 2 and current_python_version > 2: - if not force_python_upgrade: - _LOG.debug("Python upgrade required from %s to %s" % - (db_python_version, current_python_version)) - clear_lock_file(name) - raise exceptions.PythonUpgradeRequiredError(db_python_version, - current_python_version) - # Try to do an upgrade - if not self.readonly: - _LOG.warning("Python upgrade requested from %s to %s" % - (db_python_version, current_python_version)) - self.update_python_version = True - # Make a backup of the database files anyway - self.__make_zip_backup(name) - elif db_python_version == 2 and current_python_version == 2: - pass - - @catch_db_error - def version_supported(self): - dbversion = 
self.metadata.get(b'version', default=0) - return ((dbversion <= _DBVERSION) and (dbversion >= _MINVERSION)) - - @catch_db_error - def _need_schema_upgrade(self): - dbversion = self.metadata.get(b'version', default=0) - return not self.readonly and dbversion < _DBVERSION - - def __check_readonly(self, name): - """ - Return True if we don't have read/write access to the database, - otherwise return False (that is, we DO have read/write access) - """ - - # See if we write to the target directory at all? - if not os.access(name, os.W_OK): - return True - - # See if we lack write access to any files in the directory - for base in [FAMILY_TBL, PLACES_TBL, SOURCES_TBL, CITATIONS_TBL, - MEDIA_TBL, EVENTS_TBL, PERSON_TBL, REPO_TBL, - NOTE_TBL, REF_MAP, META]: - path = os.path.join(name, base + DBEXT) - if os.path.isfile(path) and not os.access(path, os.W_OK): - return True - - # All tests passed. Inform caller that we are NOT read only - return False - - @catch_db_error - def load(self, name, callback=None, mode=DBMODE_W, force_schema_upgrade=False, - force_bsddb_upgrade=False, force_bsddb_downgrade=False, - force_python_upgrade=False, update=True, - username=None, password=None): - """ - If update is False: then don't update any files; open read-only - """ - - if self.__check_readonly(name): - mode = DBMODE_R - elif update: - write_lock_file(name) - else: - mode = DBMODE_R - - if self.db_is_open: - self.close() - - self.readonly = mode == DBMODE_R - #super(DbBsddbRead, self).load(name, callback, mode) - if callback: - callback(12) - - # Save full path and base file name - self.full_name = os.path.abspath(name) - self.path = self.full_name - self.brief_name = os.path.basename(name) - - # We use the existence of the person table as a proxy for the database - # being new - if not os.path.exists(os.path.join(self.path, 'person.db')): - self._write_version(name) - - # If we re-enter load with force_python_upgrade True, then we have - # already checked the bsddb version, and 
then checked python version, - # and are agreeing on the upgrade - if not force_python_upgrade: - self.__check_bdb_version(name, force_bsddb_upgrade, - force_bsddb_downgrade) - - self.__check_python_version(name, force_python_upgrade) - - # Check for pickle upgrade - versionpath = os.path.join(self.path, str(PCKVERSFN)) - # Up to gramps 3.4.x PCKVERSFN was not written - # Gramps 4.2 incorrectly wrote PCKVERSFN = 'Yes' for Python2, so check - # whether python is upgraded - if ((not self.readonly and not self.update_pickle_version) and - (not os.path.isfile(versionpath) or self.update_python_version)): - _LOG.debug("Make backup in case there is a pickle upgrade") - self.__make_zip_backup(name) - self.update_pickle_version = True - - # Check for schema upgrade - versionpath = os.path.join(self.path, str(SCHVERSFN)) - if os.path.isfile(versionpath): - with open(versionpath, "r") as version_file: - schema_version = int(version_file.read().strip()) - else: - schema_version = 0 - if not self.readonly and schema_version < _DBVERSION and \ - force_schema_upgrade: - _LOG.debug("Make backup in case there is a schema upgrade") - self.__make_zip_backup(name) - - # Set up database environment - self.env = db.DBEnv() - self.env.set_cachesize(0, DBCACHE) - - # These env settings are only needed for Txn environment - self.env.set_lk_max_locks(DBLOCKS) - self.env.set_lk_max_objects(DBOBJECTS) - - # Set to auto remove stale logs - self._set_auto_remove() - - # Set not to flush to disk synchronous, this greatly speeds up - # database changes, but comes at the cause of loss of durability, so - # power loss might cause a need to run db recovery, see BSDDB manual - ## NOTE: due to pre 4.8 bsddb bug it is needed to set this flag before - ## open of env, #16492 - http://download.oracle.com/docs/cd/E17076_02/html/installation/changelog_4_8.html - self.env.set_flags(db.DB_TXN_WRITE_NOSYNC, 1) - - # The DB_PRIVATE flag must go if we ever move to multi-user setup - env_flags = db.DB_CREATE | 
db.DB_PRIVATE |\ - db.DB_INIT_MPOOL - if not self.readonly: - env_flags |= db.DB_INIT_LOG | db.DB_INIT_TXN - # As opposed to before, we always try recovery on databases - env_flags |= db.DB_RECOVER - - # Environment name is now based on the filename - env_name = name - - try: - self.env.open(env_name, env_flags) - except Exception as msg: - _LOG.warning("Error opening db environment: " + str(msg)) - try: - self.__close_early() - except: - pass - raise DbEnvironmentError(msg) - - if not self.readonly: - self.env.txn_checkpoint() - - if callback: - callback(25) - - # Process metadata - self.metadata = self.__open_shelf(self.full_name, META) - - # If we cannot work with this DB version, - # it makes no sense to go further - if not self.version_supported(): - tree_vers = self.metadata.get(b'version', default=0) - self.__close_early() - raise DbVersionError(tree_vers, _MINVERSION, _DBVERSION) - - gstats = self.metadata.get(b'gender_stats', default=None) - - # Ensure version info in metadata - if not self.readonly: - # Start transaction - with BSDDBTxn(self.env, self.metadata) as txn: - if gstats is None: - # New database. Set up the current version. - #self.metadata.put(b'version', _DBVERSION, txn=the_txn) - txn.put(b'version', _DBVERSION) - txn.put(b'upgraded', 'Yes') - elif b'version' not in self.metadata: - # Not new database, but the version is missing. - # Use 0, but it is likely to fail anyway. 
- txn.put(b'version', 0) - - self.genderStats = GenderStats(gstats) - - # Open main tables in gramps database - db_maps = [ - ("family_map", FAMILY_TBL, db.DB_HASH), - ("place_map", PLACES_TBL, db.DB_HASH), - ("source_map", SOURCES_TBL, db.DB_HASH), - ("citation_map", CITATIONS_TBL, db.DB_HASH), - ("media_map", MEDIA_TBL, db.DB_HASH), - ("event_map", EVENTS_TBL, db.DB_HASH), - ("person_map", PERSON_TBL, db.DB_HASH), - ("repository_map", REPO_TBL, db.DB_HASH), - ("note_map", NOTE_TBL, db.DB_HASH), - ("tag_map", TAG_TBL, db.DB_HASH), - ("reference_map", REF_MAP, db.DB_BTREE), - ] - - dbflags = DBFLAGS_R if self.readonly else DBFLAGS_O - for (dbmap, dbname, dbtype) in db_maps: - _db = self.__open_shelf(self.full_name, dbname, dbtype) - setattr(self, dbmap, _db) - - if callback: - callback(37) - - # Open name grouping database - self.name_group = self.__open_db(self.full_name, NAME_GROUP, - db.DB_HASH, db.DB_DUP) - - # We have now successfully opened the database, so if the BSDDB version - # has changed, we update the DBSDB version file. - - if self.update_env_version: - versionpath = os.path.join(name, BDBVERSFN) - with open(versionpath, "w") as version_file: - version = str(db.version()) - version_file.write(version) - _LOG.debug("Updated bsddb version file to %s" % str(db.version())) - - if self.update_python_version: - versionpath = os.path.join(name, "pythonversion.txt") - version = str(version_info[0]) - _LOG.debug("Updated python version file to %s" % version) - with open(versionpath, "w") as version_file: - version_file.write(version) - - # Here we take care of any changes in the tables related to new code. - # If secondary indices change, then they should removed - # or rebuilt by upgrade as well. In any case, the - # self.secondary_connected flag should be set accordingly. - if self.update_pickle_version: - from . 
import upgrade - UpdateCallback.__init__(self, callback) - upgrade.gramps_upgrade_pickle(self) - versionpath = os.path.join(name, str(PCKVERSFN)) - with open(versionpath, "w") as version_file: - version = "Yes" - version_file.write(version) - _LOG.debug("Updated pickle version file to %s" % str(version)) - - self.__load_metadata() - - if self._need_schema_upgrade(): - oldschema = self.metadata.get(b'version', default=0) - newschema = _DBVERSION - _LOG.debug("Schema upgrade required from %s to %s" % - (oldschema, newschema)) - if force_schema_upgrade == True: - self._gramps_upgrade(callback) - versionpath = os.path.join(name, str(SCHVERSFN)) - with open(versionpath, "w") as version_file: - version = str(_DBVERSION) - version_file.write(version) - _LOG.debug("Updated schema version file to %s" % str(version)) - else: - self.__close_early() - clear_lock_file(name) - raise DbUpgradeRequiredError(oldschema, newschema) - - if callback: - callback(50) - - # Connect secondary indices - if not self.secondary_connected: - self.__connect_secondary() - - if callback: - callback(75) - - # Open undo database - self.__open_undodb() - self.db_is_open = True - - if callback: - callback(87) - - self.abort_possible = True - return 1 - - def __open_undodb(self): - """ - Open the undo database - """ - if not self.readonly: - self.undolog = os.path.join(self.full_name, DBUNDOFN) - self.undodb = DbUndo(self, self.undolog) - self.undodb.open() - - def __close_undodb(self): - if not self.readonly: - try: - self.undodb.close() - except db.DBNoSuchFileError: - pass - - def get_undodb(self): - """ - Return the database that keeps track of Undo/Redo operations. 
- """ - return self.undodb - - def __load_metadata(self): - # name display formats - self.name_formats = self.metadata.get(b'name_formats', default=[]) - # upgrade formats if they were saved in the old way - for format_ix in range(len(self.name_formats)): - format = self.name_formats[format_ix] - if len(format) == 3: - format = format + (True,) - self.name_formats[format_ix] = format - - # database owner - try: - owner_data = self.metadata.get(b'researcher') - if owner_data: - if len(owner_data[0]) == 7: # Pre-3.3 format - owner_data = upgrade_researcher(owner_data) - self.owner.unserialize(owner_data) - except ImportError: #handle problems with pre-alpha 3.0 - pass - - # bookmarks - def meta(key): - return self.metadata.get(key, default=[]) - - self.bookmarks.set(meta(b'bookmarks')) - self.family_bookmarks.set(meta(b'family_bookmarks')) - self.event_bookmarks.set(meta(b'event_bookmarks')) - self.source_bookmarks.set(meta(b'source_bookmarks')) - self.citation_bookmarks.set(meta(b'citation_bookmarks')) - self.repo_bookmarks.set(meta(b'repo_bookmarks')) - self.media_bookmarks.set(meta(b'media_bookmarks')) - self.place_bookmarks.set(meta(b'place_bookmarks')) - self.note_bookmarks.set(meta(b'note_bookmarks')) - - # Custom type values - self.event_names = set(meta(b'event_names')) - self.family_attributes = set(meta(b'fattr_names')) - self.individual_attributes = set(meta(b'pattr_names')) - self.source_attributes = set(meta(b'sattr_names')) - self.marker_names = set(meta(b'marker_names')) - self.child_ref_types = set(meta(b'child_refs')) - self.family_rel_types = set(meta(b'family_rels')) - self.event_role_names = set(meta(b'event_roles')) - self.name_types = set(meta(b'name_types')) - self.origin_types = set(meta(b'origin_types')) - self.repository_types = set(meta(b'repo_types')) - self.note_types = set(meta(b'note_types')) - self.source_media_types = set(meta(b'sm_types')) - self.url_types = set(meta(b'url_types')) - self.media_attributes = set(meta(b'mattr_names')) 
- self.event_attributes = set(meta(b'eattr_names')) - self.place_types = set(meta(b'place_types')) - - # surname list - self.surname_list = meta(b'surname_list') - - def __connect_secondary(self): - """ - Connect or creates secondary index tables. - - It assumes that the tables either exist and are in the right - format or do not exist (in which case they get created). - - It is the responsibility of upgrade code to either create - or remove invalid secondary index tables. - """ - - # index tables used just for speeding up searches - self.surnames = self.__open_db(self.full_name, SURNAMES, db.DB_BTREE, - db.DB_DUP | db.DB_DUPSORT) - - db_maps = [ - ("id_trans", IDTRANS, db.DB_HASH, 0), - ("fid_trans", FIDTRANS, db.DB_HASH, 0), - ("eid_trans", EIDTRANS, db.DB_HASH, 0), - ("pid_trans", PIDTRANS, db.DB_HASH, 0), - ("sid_trans", SIDTRANS, db.DB_HASH, 0), - ("cid_trans", CIDTRANS, db.DB_HASH, 0), - ("oid_trans", OIDTRANS, db.DB_HASH, 0), - ("rid_trans", RIDTRANS, db.DB_HASH, 0), - ("nid_trans", NIDTRANS, db.DB_HASH, 0), - ("tag_trans", TAGTRANS, db.DB_HASH, 0), - ("parents", PPARENT, db.DB_HASH, 0), - ("reference_map_primary_map", REF_PRI, db.DB_BTREE, 0), - ("reference_map_referenced_map", REF_REF, db.DB_BTREE, db.DB_DUPSORT), - ] - - for (dbmap, dbname, dbtype, dbflags) in db_maps: - _db = self.__open_db(self.full_name, dbname, dbtype, - db.DB_DUP | dbflags) - setattr(self, dbmap, _db) - - if not self.readonly: - - assoc = [ - (self.person_map, self.surnames, find_byte_surname), - (self.person_map, self.id_trans, find_idmap), - (self.family_map, self.fid_trans, find_idmap), - (self.event_map, self.eid_trans, find_idmap), - (self.place_map, self.pid_trans, find_idmap), - (self.place_map, self.parents, find_parent), - (self.source_map, self.sid_trans, find_idmap), - (self.citation_map, self.cid_trans, find_idmap), - (self.media_map, self.oid_trans, find_idmap), - (self.repository_map, self.rid_trans, find_idmap), - (self.note_map, self.nid_trans, find_idmap), - 
(self.tag_map, self.tag_trans, find_idmap), - (self.reference_map, self.reference_map_primary_map, - find_primary_handle), - (self.reference_map, self.reference_map_referenced_map, - find_referenced_handle), - ] - - flags = DBFLAGS_R if self.readonly else DBFLAGS_O - for (dbmap, a_map, a_find) in assoc: - dbmap.associate(a_map, a_find, flags=flags) - - self.secondary_connected = True - self.smap_index = len(self.source_map) - self.cmap_index = len(self.citation_map) - self.emap_index = len(self.event_map) - self.pmap_index = len(self.person_map) - self.fmap_index = len(self.family_map) - self.lmap_index = len(self.place_map) - self.omap_index = len(self.media_map) - self.rmap_index = len(self.repository_map) - self.nmap_index = len(self.note_map) - - @catch_db_error - def rebuild_secondary(self, callback=None): - if self.readonly: - return - - table_flags = DBFLAGS_O - - # remove existing secondary indices - - items = [ - ( self.id_trans, IDTRANS ), - ( self.surnames, SURNAMES ), - ( self.fid_trans, FIDTRANS ), - ( self.pid_trans, PIDTRANS ), - ( self.oid_trans, OIDTRANS ), - ( self.eid_trans, EIDTRANS ), - ( self.rid_trans, RIDTRANS ), - ( self.nid_trans, NIDTRANS ), - ( self.cid_trans, CIDTRANS ), - ( self.tag_trans, TAGTRANS ), - ( self.parents, PPARENT ), - ( self.reference_map_primary_map, REF_PRI), - ( self.reference_map_referenced_map, REF_REF), - ] - - index = 1 - for (database, name) in items: - database.close() - _db = db.DB(self.env) - try: - _db.remove(_mkname(self.full_name, name), name) - except db.DBNoSuchFileError: - pass - if callback: - callback(index) - index += 1 - - if callback: - callback(11) - - # Set flag saying that we have removed secondary indices - # and then call the creating routine - self.secondary_connected = False - self.__connect_secondary() - if callback: - callback(12) - - @catch_db_error - def find_place_child_handles(self, handle): - """ - Find all child places having the given place as the primary parent. 
- """ - parent_cur = self.get_place_parent_cursor() - - try: - ret = parent_cur.set(handle.encode('utf-8')) - except: - ret = None - - while (ret is not None): - (key, data) = ret - - ### FIXME: this is a dirty hack that works without no - ### sensible explanation. For some reason, for a readonly - ### database, secondary index returns a primary table key - ### corresponding to the data, not the data. - if self.readonly: - data = self.place_map.get(data) - else: - data = pickle.loads(data) - - yield data[0] - ret = parent_cur.next_dup() - - parent_cur.close() - - @catch_db_error - def find_backlink_handles(self, handle, include_classes=None): - """ - Find all objects that hold a reference to the object handle. - - Returns an interator over a list of (class_name, handle) tuples. - - :param handle: handle of the object to search for. - :type handle: database handle - :param include_classes: list of class names to include in the results. - Default: None means include all classes. - :type include_classes: list of class names - - Note that this is a generator function, it returns a iterator for - use in loops. If you want a list of the results use:: - - result_list = list(find_backlink_handles(handle)) - """ - # Use the secondary index to locate all the reference_map entries - # that include a reference to the object we are looking for. - referenced_cur = self._get_reference_map_referenced_cursor() - - try: - ret = referenced_cur.set(handle.encode('utf-8')) - except: - ret = None - - while (ret is not None): - (key, data) = ret - - # data values are of the form: - # ((primary_object_class_name, primary_object_handle), - # (referenced_object_class_name, referenced_object_handle)) - # so we need the first tuple to give us the type to compare - - ### FIXME: this is a dirty hack that works without no - ### sensible explanation. For some reason, for a readonly - ### database, secondary index returns a primary table key - ### corresponding to the data, not the data. 
- if self.readonly: - data = self.reference_map.get(data) - else: - data = pickle.loads(data) - - key, handle = data[0][:2] - name = KEY_TO_CLASS_MAP[key] - assert name == KEY_TO_CLASS_MAP[data[0][0]] - assert handle == data[0][1] - if (include_classes is None or - name in include_classes): - yield (name, handle) - - ret = referenced_cur.next_dup() - - referenced_cur.close() - - def _delete_primary_from_reference_map(self, handle, transaction, txn=None): - """ - Remove all references to the primary object from the reference_map. - handle should be utf-8 - """ - primary_cur = self._get_reference_map_primary_cursor() - - try: - ret = primary_cur.set(handle) - except: - ret = None - - remove_list = set() - while (ret is not None): - (key, data) = ret - - # data values are of the form: - # ((primary_object_class_name, primary_object_handle), - # (referenced_object_class_name, referenced_object_handle)) - - # so we need the second tuple give us a reference that we can - # combine with the primary_handle to get the main key. - main_key = (handle.decode('utf-8'), pickle.loads(data)[1][1]) - - # The trick is not to remove while inside the cursor, - # but collect them all and remove after the cursor is closed - remove_list.add(main_key) - - ret = primary_cur.next_dup() - - primary_cur.close() - - # Now that the cursor is closed, we can remove things - for main_key in remove_list: - self.__remove_reference(main_key, transaction, txn) - - def _update_reference_map(self, obj, transaction, txn=None): - """ - If txn is given, then changes are written right away using txn. - """ - - # Add references to the reference_map for all primary object referenced - # from the primary object 'obj' or any of its secondary objects. 
- handle = obj.handle - existing_references = set() - primary_cur = self._get_reference_map_primary_cursor() - key = handle.encode('utf-8') - try: - ret = primary_cur.set(key) - except: - ret = None - - while (ret is not None): - (key, data) = ret - # data values are of the form: - # ((primary_object_class_name, primary_object_handle), - # (referenced_object_class_name, referenced_object_handle)) - # so we need the second tuple give us a reference that we can - # compare with what is returned from - # get_referenced_handles_recursively - - # secondary DBs are not DBShelf's, so we need to do pickling - # and unpickling ourselves here - existing_reference = pickle.loads(data)[1] - existing_references.add((KEY_TO_CLASS_MAP[existing_reference[0]], - existing_reference[1])) - ret = primary_cur.next_dup() - primary_cur.close() - - # Once we have the list of rows that already have a reference - # we need to compare it with the list of objects that are - # still references from the primary object. - current_references = set(obj.get_referenced_handles_recursively()) - no_longer_required_references = existing_references.difference( - current_references) - new_references = current_references.difference(existing_references) - - # handle addition of new references - for (ref_class_name, ref_handle) in new_references: - data = ((CLASS_TO_KEY_MAP[obj.__class__.__name__], handle), - (CLASS_TO_KEY_MAP[ref_class_name], ref_handle),) - self.__add_reference((handle, ref_handle), data, transaction, txn) - - # handle deletion of old references - for (ref_class_name, ref_handle) in no_longer_required_references: - try: - self.__remove_reference((handle, ref_handle), transaction, txn) - except: - # ignore missing old reference - pass - - def __remove_reference(self, key, transaction, txn): - """ - Remove the reference specified by the key, preserving the change in - the passed transaction. - """ - if isinstance(key, tuple): - #create a byte string key, first validity check in python 3! 
- for val in key: - if isinstance(val, bytes): - raise DbError(_('An attempt is made to save a reference key ' - 'which is partly bytecode, this is not allowed.\n' - 'Key is %s') % str(key)) - key = str(key) - key = key.encode('utf-8') - if not self.readonly: - if not transaction.batch: - old_data = self.reference_map.get(key, txn=txn) - transaction.add(REFERENCE_KEY, TXNDEL, key, old_data, None) - #transaction.reference_del.append(str(key)) - self.reference_map.delete(key, txn=txn) - - def __add_reference(self, key, data, transaction, txn): - """ - Add the reference specified by the key and the data, preserving the - change in the passed transaction. - """ - if isinstance(key, tuple): - #create a string key - key = str(key) - key = key.encode('utf-8') - if self.readonly or not key: - return - - self.reference_map.put(key, data, txn=txn) - if not transaction.batch: - transaction.add(REFERENCE_KEY, TXNADD, key, None, data) - #transaction.reference_add.append((str(key), data)) - - @catch_db_error - def reindex_reference_map(self, callback): - """ - Reindex all primary records in the database. - - This will be a slow process for large databases. 
- """ - - # First, remove the reference map and related tables - - db_maps = [ - ("reference_map_referenced_map", REF_REF), - ("reference_map_primary_map", REF_PRI), - ("reference_map", REF_MAP), - ] - - for index, (dbmap, dbname) in enumerate(db_maps): - getattr(self, dbmap).close() - _db = db.DB(self.env) - try: - _db.remove(_mkname(self.full_name, dbname), dbname) - except db.DBNoSuchFileError: - pass - callback(index+1) - - # Open reference_map and primary map - self.reference_map = self.__open_shelf(self.full_name, REF_MAP, - dbtype=db.DB_BTREE) - - self.reference_map_primary_map = self.__open_db(self.full_name, - REF_PRI, db.DB_BTREE, db.DB_DUP) - - self.reference_map.associate(self.reference_map_primary_map, - find_primary_handle, DBFLAGS_O) - - # Make a tuple of the functions and classes that we need for - # each of the primary object tables. - - with DbTxn(_("Rebuild reference map"), self, batch=True, - no_magic=True) as transaction: - callback(4) - - primary_table = ( - (self.get_person_cursor, Person), - (self.get_family_cursor, Family), - (self.get_event_cursor, Event), - (self.get_place_cursor, Place), - (self.get_source_cursor, Source), - (self.get_citation_cursor, Citation), - (self.get_media_cursor, Media), - (self.get_repository_cursor, Repository), - (self.get_note_cursor, Note), - (self.get_tag_cursor, Tag), - ) - - # Now we use the functions and classes defined above - # to loop through each of the primary object tables. 
- - for cursor_func, class_func in primary_table: - logging.info("Rebuilding %s reference map" % - class_func.__name__) - with cursor_func() as cursor: - for found_handle, val in cursor: - obj = class_func() - obj.unserialize(val) - with BSDDBTxn(self.env) as txn: - self._update_reference_map(obj, - transaction, txn.txn) - - callback(5) - - self.reference_map_referenced_map = self.__open_db(self.full_name, - REF_REF, db.DB_BTREE, db.DB_DUP|db.DB_DUPSORT) - - flags = DBFLAGS_R if self.readonly else DBFLAGS_O - self.reference_map.associate(self.reference_map_referenced_map, - find_referenced_handle, flags=flags) - callback(6) - - def __close_metadata(self): - if not self.readonly: - # Start transaction - with BSDDBTxn(self.env, self.metadata) as txn: - - # name display formats - txn.put(b'name_formats', self.name_formats) - - # database owner - owner_data = self.owner.serialize() - txn.put(b'researcher', owner_data) - - # bookmarks - txn.put(b'bookmarks', self.bookmarks.get()) - txn.put(b'family_bookmarks', self.family_bookmarks.get()) - txn.put(b'event_bookmarks', self.event_bookmarks.get()) - txn.put(b'source_bookmarks', self.source_bookmarks.get()) - txn.put(b'citation_bookmarks', self.citation_bookmarks.get()) - txn.put(b'place_bookmarks', self.place_bookmarks.get()) - txn.put(b'repo_bookmarks', self.repo_bookmarks.get()) - txn.put(b'media_bookmarks', self.media_bookmarks.get()) - txn.put(b'note_bookmarks', self.note_bookmarks.get()) - - # gender stats - txn.put(b'gender_stats', self.genderStats.save_stats()) - - # Custom type values - txn.put(b'event_names', list(self.event_names)) - txn.put(b'fattr_names', list(self.family_attributes)) - txn.put(b'pattr_names', list(self.individual_attributes)) - txn.put(b'sattr_names', list(self.source_attributes)) - txn.put(b'marker_names', list(self.marker_names)) - txn.put(b'child_refs', list(self.child_ref_types)) - txn.put(b'family_rels', list(self.family_rel_types)) - txn.put(b'event_roles', list(self.event_role_names)) 
- txn.put(b'name_types', list(self.name_types)) - txn.put(b'origin_types', list(self.origin_types)) - txn.put(b'repo_types', list(self.repository_types)) - txn.put(b'note_types', list(self.note_types)) - txn.put(b'sm_types', list(self.source_media_types)) - txn.put(b'url_types', list(self.url_types)) - txn.put(b'mattr_names', list(self.media_attributes)) - txn.put(b'eattr_names', list(self.event_attributes)) - txn.put(b'place_types', list(self.place_types)) - - # name display formats - txn.put(b'surname_list', self.surname_list) - - self.metadata.close() - - def __close_early(self): - """ - Bail out if the incompatible version is discovered: - * close cleanly to not damage data/env - """ - if hasattr(self, 'metadata') and self.metadata: - self.metadata.close() - self.env.close() - self.metadata = None - self.env = None - self.db_is_open = False - - @catch_db_error - def close(self, update=True, user=None): - """ - Close the database. - if update is False, don't change access times, etc. - """ - if not self.db_is_open: - return - if self.txn: - self.transaction_abort(self.transaction) - if not self.readonly: - self.env.txn_checkpoint() - - self.__close_metadata() - self.name_group.close() - self.surnames.close() - self.parents.close() - self.id_trans.close() - self.fid_trans.close() - self.eid_trans.close() - self.rid_trans.close() - self.nid_trans.close() - self.oid_trans.close() - self.sid_trans.close() - self.cid_trans.close() - self.pid_trans.close() - self.tag_trans.close() - self.reference_map_primary_map.close() - self.reference_map_referenced_map.close() - self.reference_map.close() - self.secondary_connected = False - - # primary databases must be closed after secondary indexes, or - # we run into problems with any active cursors. 
- self.person_map.close() - self.family_map.close() - self.repository_map.close() - self.note_map.close() - self.place_map.close() - self.source_map.close() - self.citation_map.close() - self.media_map.close() - self.event_map.close() - self.tag_map.close() - self.env.close() - self.__close_undodb() - - self.person_map = None - self.family_map = None - self.repository_map = None - self.note_map = None - self.place_map = None - self.source_map = None - self.citation_map = None - self.media_map = None - self.event_map = None - self.tag_map = None - self.surnames = None - self.env = None - self.metadata = None - self.db_is_open = False - self.surname_list = None - - DbBsddbRead.close(self) - - self.person_map = None - self.family_map = None - self.repository_map = None - self.note_map = None - self.place_map = None - self.source_map = None - self.citation_map = None - self.media_map = None - self.event_map = None - self.tag_map = None - self.reference_map_primary_map = None - self.reference_map_referenced_map = None - self.reference_map = None - self.undo_callback = None - self.redo_callback = None - self.undo_history_callback = None - self.undodb = None - - try: - clear_lock_file(self.get_save_path()) - except IOError: - pass - - def __add_object(self, obj, transaction, find_next_func, commit_func): - if find_next_func and not obj.gramps_id: - obj.gramps_id = find_next_func() - if not obj.handle: - obj.handle = create_id() - commit_func(obj, transaction) - return obj.handle - - def add_person(self, person, transaction, set_gid=True): - """ - Add a Person to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. 
- """ - handle = self.__add_object(person, transaction, - self.find_next_person_gramps_id if set_gid else None, - self.commit_person) - return handle - - def add_family(self, family, transaction, set_gid=True): - """ - Add a Family to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. - """ - return self.__add_object(family, transaction, - self.find_next_family_gramps_id if set_gid else None, - self.commit_family) - - def add_source(self, source, transaction, set_gid=True): - """ - Add a Source to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. - """ - return self.__add_object(source, transaction, - self.find_next_source_gramps_id if set_gid else None, - self.commit_source) - - def add_citation(self, citation, transaction, set_gid=True): - """ - Add a Citation to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. - """ - return self.__add_object(citation, transaction, - self.find_next_citation_gramps_id if set_gid else None, - self.commit_citation) - - def add_event(self, event, transaction, set_gid=True): - """ - Add an Event to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. - """ - if event.type.is_custom(): - self.event_names.add(str(event.type)) - return self.__add_object(event, transaction, - self.find_next_event_gramps_id if set_gid else None, - self.commit_event) - - def add_place(self, place, transaction, set_gid=True): - """ - Add a Place to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. 
- """ - return self.__add_object(place, transaction, - self.find_next_place_gramps_id if set_gid else None, - self.commit_place) - - def add_media(self, media, transaction, set_gid=True): - """ - Add a Media to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. - """ - return self.__add_object(media, transaction, - self.find_next_media_gramps_id if set_gid else None, - self.commit_media) - - def add_repository(self, obj, transaction, set_gid=True): - """ - Add a Repository to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. - """ - return self.__add_object(obj, transaction, - self.find_next_repository_gramps_id if set_gid else None, - self.commit_repository) - - def add_note(self, obj, transaction, set_gid=True): - """ - Add a Note to the database, assigning internal IDs if they have - not already been defined. - - If not set_gid, then gramps_id is not set. - """ - return self.__add_object(obj, transaction, - self.find_next_note_gramps_id if set_gid else None, - self.commit_note) - - def add_tag(self, obj, transaction): - """ - Add a Tag to the database, assigning a handle if it has not already - been defined. 
- """ - return self.__add_object(obj, transaction, None, self.commit_tag) - - def __do_remove(self, handle, transaction, data_map, key): - if self.readonly or not handle: - return - - handle = handle.encode('utf-8') - if transaction.batch: - with BSDDBTxn(self.env, data_map) as txn: - self._delete_primary_from_reference_map(handle, transaction, - txn=txn.txn) - txn.delete(handle) - else: - self._delete_primary_from_reference_map(handle, transaction, - txn=self.txn) - old_data = data_map.get(handle, txn=self.txn) - data_map.delete(handle, txn=self.txn) - transaction.add(key, TXNDEL, handle, old_data, None) - - def remove_person(self, handle, transaction): - """ - Remove the Person specified by the database handle from the database, - preserving the change in the passed transaction. - """ - - if self.readonly or not handle: - return - person = self.get_person_from_handle(handle) - self.genderStats.uncount_person (person) - self.remove_from_surname_list(person) - handle = handle.encode('utf-8') - if transaction.batch: - with BSDDBTxn(self.env, self.person_map) as txn: - self._delete_primary_from_reference_map(handle, transaction, - txn=txn.txn) - txn.delete(handle) - else: - self._delete_primary_from_reference_map(handle, transaction, - txn=self.txn) - self.person_map.delete(handle, txn=self.txn) - transaction.add(PERSON_KEY, TXNDEL, handle, person.serialize(), None) - - def remove_source(self, handle, transaction): - """ - Remove the Source specified by the database handle from the - database, preserving the change in the passed transaction. - """ - self.__do_remove(handle, transaction, self.source_map, - SOURCE_KEY) - - def remove_citation(self, handle, transaction): - """ - Remove the Citation specified by the database handle from the - database, preserving the change in the passed transaction. 
- """ - self.__do_remove(handle, transaction, self.citation_map, - CITATION_KEY) - - def remove_event(self, handle, transaction): - """ - Remove the Event specified by the database handle from the - database, preserving the change in the passed transaction. - """ - self.__do_remove(handle, transaction, self.event_map, - EVENT_KEY) - - def remove_media(self, handle, transaction): - """ - Remove the MediaPerson specified by the database handle from the - database, preserving the change in the passed transaction. - """ - self.__do_remove(handle, transaction, self.media_map, - MEDIA_KEY) - - def remove_place(self, handle, transaction): - """ - Remove the Place specified by the database handle from the - database, preserving the change in the passed transaction. - """ - self.__do_remove(handle, transaction, self.place_map, - PLACE_KEY) - - def remove_family(self, handle, transaction): - """ - Remove the Family specified by the database handle from the - database, preserving the change in the passed transaction. - """ - self.__do_remove(handle, transaction, self.family_map, - FAMILY_KEY) - - def remove_repository(self, handle, transaction): - """ - Remove the Repository specified by the database handle from the - database, preserving the change in the passed transaction. - """ - self.__do_remove(handle, transaction, self.repository_map, - REPOSITORY_KEY) - - def remove_note(self, handle, transaction): - """ - Remove the Note specified by the database handle from the - database, preserving the change in the passed transaction. - """ - self.__do_remove(handle, transaction, self.note_map, - NOTE_KEY) - - def remove_tag(self, handle, transaction): - """ - Remove the Tag specified by the database handle from the - database, preserving the change in the passed transaction. 
- """ - self.__do_remove(handle, transaction, self.tag_map, - TAG_KEY) - - @catch_db_error - def set_name_group_mapping(self, name, group): - if not self.readonly: - # Start transaction - with BSDDBTxn(self.env, self.name_group) as txn: - sname = name.encode('utf-8') - data = txn.get(sname) - if data is not None: - txn.delete(sname) - if group is not None: - txn.put(sname, group.encode('utf-8')) - if group is None: - grouppar = '' - else: - grouppar = group - self.emit('person-groupname-rebuild', (name, grouppar)) - - @catch_db_error - def __build_surname_list(self): - """ - Build surname list for use in autocompletion - This is a list of unicode objects, which are decoded from the utf-8 in - bsddb - """ - self.surname_list = sorted( - [s.decode('utf-8') for s in set(self.surnames.keys())], - key=glocale.sort_key) - - def add_to_surname_list(self, person, batch_transaction): - """ - Add surname to surname list - """ - if batch_transaction: - return - name = find_surname_name(person.handle, - person.get_primary_name().serialize()) - i = bisect.bisect(self.surname_list, name) - if 0 < i <= len(self.surname_list): - if self.surname_list[i-1] != name: - self.surname_list.insert(i, name) - else: - self.surname_list.insert(i, name) - - @catch_db_error - def remove_from_surname_list(self, person): - """ - Check whether there are persons with the same surname left in - the database. - - If not then we need to remove the name from the list. - The function must be overridden in the derived class. 
- """ - uname = find_surname_name(person.handle, - person.get_primary_name().serialize()) - name = uname.encode('utf-8') - try: - cursor = self.surnames.cursor(txn=self.txn) - cursor_position = cursor.set(name) - if cursor_position is not None and cursor.count() == 1: - #surname list contains unicode objects - i = bisect.bisect(self.surname_list, uname) - if 0 <= i-1 < len(self.surname_list): - del self.surname_list[i-1] - except db.DBError as err: - if str(err) == "(0, 'DB object has been closed')": - pass # A batch transaction closes the surnames db table. - else: - raise - finally: - if 'cursor' in locals(): - cursor.close() - - def _commit_base(self, obj, data_map, key, transaction, change_time): - """ - Commit the specified object to the database, storing the changes as - part of the transaction. - """ - if self.readonly or not obj or not obj.handle: - return - - obj.change = int(change_time or time.time()) - handle = obj.handle - handle = handle.encode('utf-8') - - self._update_reference_map(obj, transaction, self.txn) - - new_data = obj.serialize() - old_data = None - if not transaction.batch: - old_data = data_map.get(handle, txn=self.txn) - op = TXNUPD if old_data else TXNADD - transaction.add(key, op, handle, old_data, new_data) - data_map.put(handle, new_data, txn=self.txn) - return old_data - - def commit_person(self, person, transaction, change_time=None): - """ - Commit the specified Person to the database, storing the changes as - part of the transaction. 
- """ - old_data = self._commit_base( - person, self.person_map, PERSON_KEY, transaction, change_time) - - if old_data: - old_person = Person(old_data) - - # Update gender statistics if necessary - if (old_person.gender != person.gender or - old_person.primary_name.first_name != - person.primary_name.first_name): - - self.genderStats.uncount_person(old_person) - self.genderStats.count_person(person) - - # Update surname list if necessary - if (find_surname_name(old_person.handle, - old_person.primary_name.serialize()) != - find_surname_name(person.handle, - person.primary_name.serialize())): - self.remove_from_surname_list(old_person) - self.add_to_surname_list(person, transaction.batch) - else: - self.genderStats.count_person(person) - self.add_to_surname_list(person, transaction.batch) - - self.individual_attributes.update( - [str(attr.type) for attr in person.attribute_list - if attr.type.is_custom() and str(attr.type)]) - - self.event_role_names.update([str(eref.role) - for eref in person.event_ref_list - if eref.role.is_custom()]) - - self.name_types.update([str(name.type) - for name in ([person.primary_name] - + person.alternate_names) - if name.type.is_custom()]) - all_surn = [] # new list we will use for storage - all_surn += person.primary_name.get_surname_list() - for asurname in person.alternate_names: - all_surn += asurname.get_surname_list() - self.origin_types.update([str(surn.origintype) for surn in all_surn - if surn.origintype.is_custom()]) - all_surn = None - - self.url_types.update([str(url.type) for url in person.urls - if url.type.is_custom()]) - - attr_list = [] - for mref in person.media_list: - attr_list += [str(attr.type) for attr in mref.attribute_list - if attr.type.is_custom() and str(attr.type)] - self.media_attributes.update(attr_list) - - def commit_media(self, obj, transaction, change_time=None): - """ - Commit the specified Media to the database, storing the changes - as part of the transaction. 
- """ - self._commit_base(obj, self.media_map, MEDIA_KEY, - transaction, change_time) - - self.media_attributes.update( - [str(attr.type) for attr in obj.attribute_list - if attr.type.is_custom() and str(attr.type)]) - - def commit_source(self, source, transaction, change_time=None): - """ - Commit the specified Source to the database, storing the changes as - part of the transaction. - """ - self._commit_base(source, self.source_map, SOURCE_KEY, - transaction, change_time) - - self.source_media_types.update( - [str(ref.media_type) for ref in source.reporef_list - if ref.media_type.is_custom()]) - - attr_list = [] - for mref in source.media_list: - attr_list += [str(attr.type) for attr in mref.attribute_list - if attr.type.is_custom() and str(attr.type)] - self.media_attributes.update(attr_list) - - self.source_attributes.update( - [str(attr.type) for attr in source.attribute_list - if attr.type.is_custom() and str(attr.type)]) - - def commit_citation(self, citation, transaction, change_time=None): - """ - Commit the specified Citation to the database, storing the changes as - part of the transaction. - """ - self._commit_base(citation, self.citation_map, CITATION_KEY, - transaction, change_time) - - attr_list = [] - for mref in citation.media_list: - attr_list += [str(attr.type) for attr in mref.attribute_list - if attr.type.is_custom() and str(attr.type)] - self.media_attributes.update(attr_list) - - self.source_attributes.update( - [str(attr.type) for attr in citation.attribute_list - if attr.type.is_custom() and str(attr.type)]) - - def commit_place(self, place, transaction, change_time=None): - """ - Commit the specified Place to the database, storing the changes as - part of the transaction. 
- """ - self._commit_base(place, self.place_map, PLACE_KEY, - transaction, change_time) - - if place.get_type().is_custom(): - self.place_types.add(str(place.get_type())) - - self.url_types.update([str(url.type) for url in place.urls - if url.type.is_custom()]) - - attr_list = [] - for mref in place.media_list: - attr_list += [str(attr.type) for attr in mref.attribute_list - if attr.type.is_custom() and str(attr.type)] - self.media_attributes.update(attr_list) - - def commit_event(self, event, transaction, change_time=None): - """ - Commit the specified Event to the database, storing the changes as - part of the transaction. - """ - self._commit_base(event, self.event_map, EVENT_KEY, - transaction, change_time) - - self.event_attributes.update( - [str(attr.type) for attr in event.attribute_list - if attr.type.is_custom() and str(attr.type)]) - - if event.type.is_custom(): - self.event_names.add(str(event.type)) - - attr_list = [] - for mref in event.media_list: - attr_list += [str(attr.type) for attr in mref.attribute_list - if attr.type.is_custom() and str(attr.type)] - self.media_attributes.update(attr_list) - - def commit_family(self, family, transaction, change_time=None): - """ - Commit the specified Family to the database, storing the changes as - part of the transaction. 
- """ - self._commit_base(family, self.family_map, FAMILY_KEY, - transaction, change_time) - - self.family_attributes.update( - [str(attr.type) for attr in family.attribute_list - if attr.type.is_custom() and str(attr.type)]) - - rel_list = [] - for ref in family.child_ref_list: - if ref.frel.is_custom(): - rel_list.append(str(ref.frel)) - if ref.mrel.is_custom(): - rel_list.append(str(ref.mrel)) - self.child_ref_types.update(rel_list) - - self.event_role_names.update( - [str(eref.role) for eref in family.event_ref_list - if eref.role.is_custom()]) - - if family.type.is_custom(): - self.family_rel_types.add(str(family.type)) - - attr_list = [] - for mref in family.media_list: - attr_list += [str(attr.type) for attr in mref.attribute_list - if attr.type.is_custom() and str(attr.type)] - self.media_attributes.update(attr_list) - - def commit_repository(self, repository, transaction, change_time=None): - """ - Commit the specified Repository to the database, storing the changes - as part of the transaction. - """ - self._commit_base(repository, self.repository_map, REPOSITORY_KEY, - transaction, change_time) - - if repository.type.is_custom(): - self.repository_types.add(str(repository.type)) - - self.url_types.update([str(url.type) for url in repository.urls - if url.type.is_custom()]) - - def commit_note(self, note, transaction, change_time=None): - """ - Commit the specified Note to the database, storing the changes as part - of the transaction. - """ - self._commit_base(note, self.note_map, NOTE_KEY, - transaction, change_time) - - if note.type.is_custom(): - self.note_types.add(str(note.type)) - - def commit_tag(self, tag, transaction, change_time=None): - """ - Commit the specified Tag to the database, storing the changes as part - of the transaction. 
- """ - self._commit_base(tag, self.tag_map, TAG_KEY, - transaction, change_time) - - def get_from_handle(self, handle, class_type, data_map): - if handle is None: - raise HandleError('Handle is None') - if not handle: - raise HandleError('Handle is empty') - data = data_map.get(handle.encode('utf-8'), txn=self.txn) - if data: - newobj = class_type() - newobj.unserialize(data) - return newobj - raise HandleError('Handle %s not found' % handle) - - @catch_db_error - def transaction_begin(self, transaction): - """ - Prepare the database for the start of a new Transaction. - - Supported transaction parameters: - - no_magic - Boolean, defaults to False, indicating if secondary indices should be - disconnected. - """ - _LOG.debug(" %s%sDbBsddb %s transaction begin for '%s'" - % ("Magic " if not getattr(transaction, 'no_magic', False) - else "", - "Batch " if transaction.batch else "", - hex(id(self)), - transaction.get_description())) - if self.txn is not None: - msg = self.transaction.get_description() - self.transaction_abort(self.transaction) - raise DbError(_('A second transaction is started while there' - ' is still a transaction, "%s", active in the database.') % msg) - - if not isinstance(transaction, DbTxn) or len(transaction) != 0: - raise TypeError("transaction_begin must be called with an empty " - "instance of DbTxn which typically happens by using the " - "DbTxn instance as a context manager.") - - self.transaction = transaction - if transaction.batch: - # A batch transaction does not store the commits - # Aborting the session completely will become impossible. 
- self.abort_possible = False - # Undo is also impossible after batch transaction - self.undodb.clear() - self.env.txn_checkpoint() - - if (self.secondary_connected and - not getattr(transaction, 'no_magic', False)): - # Disconnect unneeded secondary indices - self.surnames.close() - _db = db.DB(self.env) - try: - _db.remove(_mkname(self.full_name, SURNAMES), SURNAMES) - except db.DBNoSuchFileError: - pass - - self.reference_map_referenced_map.close() - _db = db.DB(self.env) - try: - _db.remove(_mkname(self.full_name, REF_REF), REF_REF) - except db.DBNoSuchFileError: - pass - else: - self.bsddbtxn = BSDDBTxn(self.env) - self.txn = self.bsddbtxn.begin() - return transaction - - @catch_db_error - def transaction_commit(self, transaction): - """ - Make the changes to the database final and add the content of the - transaction to the undo database. - """ - msg = transaction.get_description() - if self._LOG_ALL: - _LOG.debug("%s: Transaction commit '%s'\n" - % (self.__class__.__name__, msg)) - - if self.readonly: - return - - if self.txn is not None: - assert msg != '' - self.bsddbtxn.commit() - self.bsddbtxn = None - self.txn = None - self.env.log_flush() - if not transaction.batch: - # do deletes and adds first - for trans_type in [TXNDEL, TXNADD, TXNUPD]: - for obj_type in range(11): - if obj_type != REFERENCE_KEY: - self.__emit(transaction, obj_type, trans_type) - self.transaction = None - transaction.clear() - self.undodb.commit(transaction, msg) - self.__after_commit(transaction) - self.has_changed = True - _LOG.debug(" %s%sDbBsddb %s transaction commit for '%s'" - % ("Magic " if not getattr(transaction, 'no_magic', False) - else "", - "Batch " if transaction.batch else "", - hex(id(self)), - transaction.get_description())) - - def __emit(self, transaction, obj_type, trans_type): - """ - Define helper function to do the actual emits - """ - if (obj_type, trans_type) in transaction: - if trans_type == TXNDEL: - handles = [handle.decode('utf-8') for handle, data in 
- transaction[(obj_type, trans_type)]] - else: - handles = [handle.decode('utf-8') for handle, data in - transaction[(obj_type, trans_type)] - if (handle, None) not in transaction[(obj_type, - TXNDEL)]] - if handles: - self.emit(KEY_TO_NAME_MAP[obj_type] + - ['-add', '-update', '-delete'][trans_type], - (handles, )) - - def transaction_abort(self, transaction): - """ - Revert the changes made to the database so far during the transaction. - """ - if self._LOG_ALL: - _LOG.debug("%s: Transaction abort '%s'\n" % - (self.__class__.__name__, transaction.get_description())) - - if self.readonly: - return - - if self.txn is not None: - self.bsddbtxn.abort() - self.bsddbtxn = None - self.txn = None - if not transaction.batch: - # It can occur that the listview is already updated because of - # the "model-treeview automatic update" combined with a - # "while Gtk.events_pending(): Gtk.main_iteration() loop" - # (typically used in a progress bar), so emit rebuild signals - # to correct that. - object_types = set([x[0] for x in list(transaction.keys())]) - for object_type in object_types: - if object_type == REFERENCE_KEY: - continue - self.emit('%s-rebuild' % KEY_TO_NAME_MAP[object_type], ()) - self.transaction = None - transaction.clear() - transaction.first = None - transaction.last = None - self.__after_commit(transaction) - - def __after_commit(self, transaction): - """ - Post-transaction commit processing - """ - if transaction.batch: - self.env.txn_checkpoint() - - if not getattr(transaction, 'no_magic', False): - # create new secondary indices to replace the ones removed - - self.surnames = self.__open_db(self.full_name, SURNAMES, - db.DB_BTREE, db.DB_DUP | db.DB_DUPSORT) - - self.person_map.associate(self.surnames, find_byte_surname, - DBFLAGS_O) - - self.reference_map_referenced_map = self.__open_db(self.full_name, - REF_REF, db.DB_BTREE, db.DB_DUP|db.DB_DUPSORT) - - self.reference_map.associate(self.reference_map_referenced_map, - find_referenced_handle, DBFLAGS_O) 
- - # Only build surname list after surname index is surely back - self.__build_surname_list() - - # Reset callbacks if necessary - if transaction.batch or not len(transaction): - return - if self.undo_callback: - self.undo_callback(_("_Undo %s") % transaction.get_description()) - if self.redo_callback: - self.redo_callback(None) - if self.undo_history_callback: - self.undo_history_callback() - - def undo(self, update_history=True): - return self.undodb.undo(update_history) - - def redo(self, update_history=True): - return self.undodb.redo(update_history) - - def _gramps_upgrade(self, callback=None): - UpdateCallback.__init__(self, callback) - - version = self.metadata.get(b'version', default=_MINVERSION) - - t = time.time() - - from . import upgrade - - if version < 14: - upgrade.gramps_upgrade_14(self) - if version < 15: - upgrade.gramps_upgrade_15(self) - if version < 16: - upgrade.gramps_upgrade_16(self) - if version < 17: - upgrade.gramps_upgrade_17(self) - if version < 18: - upgrade.gramps_upgrade_18(self) - if version < 19: - upgrade.gramps_upgrade_19(self) - - self.reset() - self.set_total(6) - self.__connect_secondary() - self.rebuild_secondary() - # Open undo database - self.__open_undodb() - self.db_is_open = True - self.reindex_reference_map(self.update) - self.reset() - # Close undo database - self.__close_undodb() - self.db_is_open = False - - - _LOG.debug("Upgrade time: %d seconds" % int(time.time()-t)) - - def _set_auto_remove(self): - """ - BSDDB change log settings using new method with renamed attributes - """ - autoremove_flag = None - autoremove_method = None - for flag in ["DB_LOG_AUTO_REMOVE", "DB_LOG_AUTOREMOVE"]: - if hasattr(db, flag): - autoremove_flag = getattr(db, flag) - break - for method in ["log_set_config", "set_flags"]: - if hasattr(self.env, method): - autoremove_method = getattr(self.env, method) - break - if autoremove_method and autoremove_flag: - autoremove_method(autoremove_flag, 1) - else: - _LOG.debug("Failed to set 
autoremove flag") - - def _write_version(self, name): - """Write version number for a newly created DB.""" - full_name = os.path.abspath(name) - - self.env = db.DBEnv() - self.env.set_cachesize(0, DBCACHE) - - # These env settings are only needed for Txn environment - self.env.set_lk_max_locks(DBLOCKS) - self.env.set_lk_max_objects(DBOBJECTS) - - # clean up unused logs - self._set_auto_remove() - - # The DB_PRIVATE flag must go if we ever move to multi-user setup - env_flags = db.DB_CREATE | db.DB_PRIVATE |\ - db.DB_INIT_MPOOL |\ - db.DB_INIT_LOG | db.DB_INIT_TXN - - # As opposed to before, we always try recovery on databases - env_flags |= db.DB_RECOVER - - # Environment name is now based on the filename - env_name = name - - self.env.open(env_name, env_flags) - self.env.txn_checkpoint() - - self.metadata = self.__open_shelf(full_name, META) - - _LOG.debug("Write schema version %s" % _DBVERSION) - with BSDDBTxn(self.env, self.metadata) as txn: - txn.put(b'version', _DBVERSION) - - versionpath = os.path.join(name, BDBVERSFN) - version = str(db.version()) - _LOG.debug("Write bsddb version %s" % version) - with open(versionpath, "w") as version_file: - version_file.write(version) - - versionpath = os.path.join(name, "pythonversion.txt") - version = str(version_info[0]) - _LOG.debug("Write python version file to %s" % version) - with open(versionpath, "w") as version_file: - version_file.write(version) - - versionpath = os.path.join(name, str(PCKVERSFN)) - _LOG.debug("Write pickle version file to %s" % "Yes") - with open(versionpath, "w") as version_file: - version = "Yes" - version_file.write(version) - - versionpath = os.path.join(name, str(SCHVERSFN)) - _LOG.debug("Write schema version file to %s" % str(_DBVERSION)) - with open(versionpath, "w") as version_file: - version = str(_DBVERSION) - version_file.write(version) - - self.metadata.close() - self.env.close() - - def get_dbid(self): - """ - In BSDDB, we use the file directory name as the unique ID for - this 
database on this computer. - """ - return self.brief_name - - def get_summary(self): - """ - Returns dictionary of summary item. - Should include, if possible: - - _("Number of people") - _("Version") - _("Schema version") - """ - schema_version = self.metadata.get(b'version', default=None) - bdbversion_file = os.path.join(self.path, BDBVERSFN) - if os.path.isfile(bdbversion_file): - with open(bdbversion_file) as vers_file: - bsddb_version = vers_file.readline().strip() - bsddb_version = ".".join([str(v) for v in safe_eval(bsddb_version)]) - else: - bsddb_version = _("Unknown") - return { - _("Number of people"): self.get_number_of_people(), - _("Number of families"): self.get_number_of_families(), - _("Number of sources"): self.get_number_of_sources(), - _("Number of citations"): self.get_number_of_citations(), - _("Number of events"): self.get_number_of_events(), - _("Number of media"): self.get_number_of_media(), - _("Number of places"): self.get_number_of_places(), - _("Number of repositories"): self.get_number_of_repositories(), - _("Number of notes"): self.get_number_of_notes(), - _("Number of tags"): self.get_number_of_tags(), - _("Schema version"): schema_version, - _("Database version"): bsddb_version, - } - -def _mkname(path, name): - return os.path.join(path, name + DBEXT) - -def upgrade_researcher(owner_data): - """ - Upgrade researcher data to include a locality field in the address. - This should be called for databases prior to Gramps 3.3. 
- """ - addr = tuple([owner_data[0][0], ''] + list(owner_data[0][1:])) - return (addr, owner_data[1], owner_data[2], owner_data[3]) - diff --git a/gramps/plugins/db/dbapi/dbapi.py b/gramps/plugins/db/dbapi/dbapi.py index 852818d4a..f20e383e4 100644 --- a/gramps/plugins/db/dbapi/dbapi.py +++ b/gramps/plugins/db/dbapi/dbapi.py @@ -628,6 +628,29 @@ class DBAPI(DbGeneric): return old_data + def _commit_raw(self, data, obj_key): + """ + Commit a serialized primary object to the database, storing the + changes as part of the transaction. + """ + table = KEY_TO_NAME_MAP[obj_key] + handle = data[0] + + if self._has_handle(obj_key, handle): + # update the object: + sql = "UPDATE %s SET blob_data = ? WHERE handle = ?" % table + self.dbapi.execute(sql, + [pickle.dumps(data), + handle]) + else: + # Insert the object: + sql = ("INSERT INTO %s (handle, blob_data) VALUES (?, ?)") % table + self.dbapi.execute(sql, + [handle, + pickle.dumps(data)]) + + return + def _update_backlinks(self, obj, transaction): # Find existing references