Merge pull request #1018 from prculley/no_magic
commit 2e8d22ea4a
@@ -261,14 +261,8 @@ def get_participant_from_event(db, event_handle, all_=False):
     """
     participant = ""
     ellipses = False
-    try:
-        result_list = list(db.find_backlink_handles(event_handle,
-                                include_classes=['Person', 'Family']))
-    except:
-        # during a magic batch transaction find_backlink_handles tries to
-        # access the reference_map_referenced_map which is closed
-        # under those circumstances.
-        return ''
+    result_list = list(db.find_backlink_handles(
+        event_handle, include_classes=['Person', 'Family']))
 
     #obtain handles without duplicates
     people = set([x[1] for x in result_list if x[0] == 'Person'])
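The call above relies on find_backlink_handles() yielding (class_name, handle) pairs. A minimal, self-contained sketch of that consumption pattern, with toy pairs standing in for the database (not code from this commit):

def handles_by_class(backlinks, wanted=('Person', 'Family')):
    """Group (class_name, handle) pairs into {class_name: set(handles)}."""
    grouped = {name: set() for name in wanted}
    for class_name, handle in backlinks:
        if class_name in grouped:
            grouped[class_name].add(handle)
    return grouped

# Toy pairs standing in for db.find_backlink_handles(event_handle, ...)
pairs = [('Person', 'I0001'), ('Family', 'F0002'),
         ('Person', 'I0001'), ('Citation', 'C0003')]
print(handles_by_class(pairs))  # duplicate handles collapse into the sets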
@@ -100,16 +100,11 @@ def make_unknown(class_arg, explanation, class_func, commit_func, transaction,
     elif isinstance(obj, Family):
         obj.set_relationship(FamilyRelType.UNKNOWN)
         handle = obj.handle
-        if getattr(argv['db'].transaction, 'no_magic', False):
-            backlinks = argv['db'].find_backlink_handles(
-                handle, [Person.__name__])
-            for dummy, person_handle in backlinks:
-                person = argv['db'].get_person_from_handle(person_handle)
-                add_personref_to_family(obj, person)
-        else:
-            for person in argv['db'].iter_people():
-                if person._has_handle_reference('Family', handle):
-                    add_personref_to_family(obj, person)
+        backlinks = argv['db'].find_backlink_handles(
+            handle, [Person.__name__])
+        for dummy, person_handle in backlinks:
+            person = argv['db'].get_person_from_handle(person_handle)
+            add_personref_to_family(obj, person)
     elif isinstance(obj, Event):
         if 'type' in argv:
             obj.set_type(argv['type'])
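The deleted branch scanned every person with iter_people() whenever the backlink index could not be trusted; the surviving code asks the index directly. A toy sketch, using plain dicts rather than Gramps objects, of why the indexed lookup can replace the scan:

# person handle -> set of family handles (hypothetical data)
family_refs = {
    'I0001': {'F0001'},
    'I0002': {'F0001', 'F0002'},
    'I0003': set(),
}

def people_for_family_scan(handle):
    # old fallback: inspect every person
    return {p for p, fams in family_refs.items() if handle in fams}

# backlink index: built once, then queried per family
backlinks = {}
for person, fams in family_refs.items():
    for fam in fams:
        backlinks.setdefault(fam, set()).add(person)

def people_for_family_indexed(handle):
    return backlinks.get(handle, set())

assert people_for_family_scan('F0001') == people_for_family_indexed('F0001') == {'I0001', 'I0002'}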
@@ -262,9 +262,6 @@ class DBAPI(DbGeneric):
                   TXNUPD: "-update",
                   TXNDEL: "-delete",
                   None: "-delete"}
-        if txn.batch:
-            # FIXME: need a User GUI update callback here:
-            self.reindex_reference_map(lambda percent: percent)
         self.dbapi.commit()
         if not txn.batch:
             # Now, emit signals:
@@ -610,8 +607,8 @@ class DBAPI(DbGeneric):
                            [obj.handle,
                             pickle.dumps(obj.serialize())])
         self._update_secondary_values(obj)
+        self._update_backlinks(obj, trans)
         if not trans.batch:
-            self._update_backlinks(obj, trans)
             if old_data:
                 trans.add(obj_key, TXNUPD, obj.handle,
                           old_data,
@@ -648,33 +645,33 @@ class DBAPI(DbGeneric):
 
     def _update_backlinks(self, obj, transaction):
 
-        # Find existing references
-        sql = ("SELECT ref_class, ref_handle " +
-               "FROM reference WHERE obj_handle = ?")
-        self.dbapi.execute(sql, [obj.handle])
-        existing_references = set(self.dbapi.fetchall())
-
-        # Once we have the list of rows that already have a reference
-        # we need to compare it with the list of objects that are
-        # still references from the primary object.
-        current_references = set(obj.get_referenced_handles_recursively())
-        no_longer_required_references = existing_references.difference(
-            current_references)
-        new_references = current_references.difference(existing_references)
-
-        # Delete the existing references
-        self.dbapi.execute("DELETE FROM reference WHERE obj_handle = ?",
-                           [obj.handle])
-
-        # Now, add the current ones
-        for (ref_class_name, ref_handle) in current_references:
-            sql = ("INSERT INTO reference " +
-                   "(obj_handle, obj_class, ref_handle, ref_class)" +
-                   "VALUES(?, ?, ?, ?)")
-            self.dbapi.execute(sql, [obj.handle, obj.__class__.__name__,
-                                     ref_handle, ref_class_name])
-
         if not transaction.batch:
+            # Find existing references
+            sql = ("SELECT ref_class, ref_handle " +
+                   "FROM reference WHERE obj_handle = ?")
+            self.dbapi.execute(sql, [obj.handle])
+            existing_references = set(self.dbapi.fetchall())
+
+            # Once we have the list of rows that already have a reference
+            # we need to compare it with the list of objects that are
+            # still references from the primary object.
+            current_references = set(obj.get_referenced_handles_recursively())
+            no_longer_required_references = existing_references.difference(
+                current_references)
+            new_references = current_references.difference(existing_references)
+
+            # Delete the existing references
+            self.dbapi.execute("DELETE FROM reference WHERE obj_handle = ?",
+                               [obj.handle])
+
+            # Now, add the current ones
+            for (ref_class_name, ref_handle) in current_references:
+                sql = ("INSERT INTO reference " +
+                       "(obj_handle, obj_class, ref_handle, ref_class)" +
+                       "VALUES(?, ?, ?, ?)")
+                self.dbapi.execute(sql, [obj.handle, obj.__class__.__name__,
+                                         ref_handle, ref_class_name])
+
             # Add new references to the transaction
             for (ref_class_name, ref_handle) in new_references:
                 key = (obj.handle, ref_handle)
@@ -688,6 +685,20 @@ class DBAPI(DbGeneric):
                 old_data = (obj.handle, obj.__class__.__name__,
                             ref_handle, ref_class_name)
                 transaction.add(REFERENCE_KEY, TXNDEL, key, old_data, None)
+        else: # batch mode
+            current_references = set(obj.get_referenced_handles_recursively())
+
+            # Delete the existing references
+            self.dbapi.execute("DELETE FROM reference WHERE obj_handle = ?",
+                               [obj.handle])
+
+            # Now, add the current ones
+            for (ref_class_name, ref_handle) in current_references:
+                sql = ("INSERT INTO reference " +
+                       "(obj_handle, obj_class, ref_handle, ref_class)" +
+                       "VALUES(?, ?, ?, ?)")
+                self.dbapi.execute(sql, [obj.handle, obj.__class__.__name__,
+                                         ref_handle, ref_class_name])
 
     def _do_remove(self, handle, transaction, obj_key):
         if self.readonly or not handle:
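The bookkeeping above can be exercised outside Gramps with an in-memory SQLite table shaped like the reference table in these hunks. The handles and the current_references set below are invented; only the SELECT/DELETE/INSERT statements mirror the ones shown in the diff:

import sqlite3

con = sqlite3.connect(':memory:')
con.execute("CREATE TABLE reference "
            "(obj_handle TEXT, obj_class TEXT, ref_handle TEXT, ref_class TEXT)")
con.execute("INSERT INTO reference VALUES ('E0001', 'Event', 'I0001', 'Person')")

obj_handle, obj_class = 'E0001', 'Event'
# What the object currently points at (hypothetical handles).
current_references = {('Person', 'I0002'), ('Citation', 'C0001')}

existing_references = set(
    con.execute("SELECT ref_class, ref_handle FROM reference "
                "WHERE obj_handle = ?", [obj_handle]))
new_references = current_references - existing_references
no_longer_required_references = existing_references - current_references

# Rewrite the rows for this object, as both branches of _update_backlinks do.
con.execute("DELETE FROM reference WHERE obj_handle = ?", [obj_handle])
con.executemany("INSERT INTO reference VALUES (?, ?, ?, ?)",
                [(obj_handle, obj_class, ref_handle, ref_class)
                 for (ref_class, ref_handle) in current_references])
con.commit()

print(sorted(new_references))                 # recorded in the transaction when not batch
print(sorted(no_longer_required_references))  # likewise, as deletions (TXNDEL above)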
@@ -916,12 +916,7 @@ class GrampsParser(UpdateCallback):
         :param ifile: must be a file handle that is already open, with position
             at the start of the file
         """
-        if personcount < 1000:
-            no_magic = True
-        else:
-            no_magic = False
-        with DbTxn(_("Gramps XML import"), self.db, batch=True,
-                   no_magic=no_magic) as self.trans:
+        with DbTxn(_("Gramps XML import"), self.db, batch=True) as self.trans:
             self.set_total(linecount)
 
             self.db.disable_signals()
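With the personcount heuristic gone, an importer only has to open a single batch transaction. A hedged sketch of that calling pattern; DbTxn, add_person and the signal calls are standard Gramps database API, the rest is illustrative:

from gramps.gen.db import DbTxn

def import_people(db, people):
    db.disable_signals()
    try:
        # One batch transaction for the whole import; the backend now keeps
        # the reference (backlink) table current even in batch mode.
        with DbTxn("Example import", db, batch=True) as trans:
            for person in people:
                db.add_person(person, trans)
    finally:
        db.enable_signals()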
@@ -2737,9 +2737,8 @@ class GedcomParser(UpdateCallback):
           0 TRLR {1:1}
 
         """
-        no_magic = self.maxpeople < 1000
-        with DbTxn(_("GEDCOM import"), self.dbase, not use_trans,
-                   no_magic=no_magic) as self.trans:
+        with DbTxn(_("GEDCOM import"), self.dbase,
+                   not use_trans) as self.trans:
 
             self.dbase.disable_signals()
             self.__parse_header_head()
@@ -225,11 +225,6 @@ class Check(tool.BatchTool):
             checker.check_checksum()
             checker.check_media_sourceref()
             checker.check_note_links()
-
-        # for bsddb the check_backlinks doesn't work in 'batch' mode because
-        # the table used for backlinks is closed.
-        with DbTxn(_("Check Backlink Integrity"), self.db,
-                   batch=False) as checker.trans:
             checker.check_backlinks()
 
         # rebuilding reference maps needs to be done outside of a transaction
@@ -2235,6 +2230,7 @@ class CheckIntegrity:
             gid_list.append(gid)
         gid_list = []
         for note in self.db.iter_notes():
+            self.progress.step()
             ogid = gid = note.get_gramps_id()
             if gid in gid_list:
                 gid = self.db.find_next_note_gramps_id()
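The loop above is a duplicate-ID pass: remember the IDs already seen and hand out a fresh one whenever an ID repeats. A toy, self-contained version of the same idea (none of this is Gramps code):

def dedupe_ids(ids, next_id):
    """Return ids with any repeated value replaced by one from next_id()."""
    seen = set()
    result = []
    for gid in ids:
        if gid in seen:
            gid = next_id()
        seen.add(gid)
        result.append(gid)
    return result

counter = iter(range(3, 1000))
print(dedupe_ids(['N0001', 'N0002', 'N0001'],
                 lambda: 'N%04d' % next(counter)))
# -> ['N0001', 'N0002', 'N0003']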