9810: Fix undo/redo reference updates

Also rewrite to avoid using table maps.
Nick Hall 2016-11-26 15:29:40 +00:00
parent e1dca2bfaa
commit baff7cddb0
3 changed files with 103 additions and 73 deletions
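
In outline: reference-table changes are now recorded in the transaction as REFERENCE_KEY entries and replayed by the undo machinery with direct SQL, instead of being written through the per-type table maps (self.mapbase). A minimal sketch of the record layout, assuming the pickled five-tuple format unpacked by DbGenericUndo in the diff below; the constants and handle values are illustrative only:

    import pickle

    REFERENCE_KEY = 7       # the slot left as None in the old mapbase tuple
    TXNADD, TXNDEL = 0, 2   # illustrative transaction-type constants

    # Each undo record is a pickled five-tuple:
    #   (key, trans_type, handle, old_data, new_data)
    # For REFERENCE_KEY the handle is an (obj_handle, ref_handle) pair and the
    # data is a full reference row (obj_handle, obj_class, ref_handle, ref_class).
    record = pickle.dumps((REFERENCE_KEY, TXNADD,
                           ('p0001', 'e0001'),                      # handle
                           None,                                    # old_data
                           ('p0001', 'Person', 'e0001', 'Event')))  # new_data

    key, trans_type, handle, old_data, new_data = pickle.loads(record)
    # Undo applies old_data, redo applies new_data; None means the row is deleted.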


@@ -146,9 +146,9 @@ class DbGenericUndo(DbUndo):
                     pickle.loads(self.undodb[record_id])
                 if key == REFERENCE_KEY:
-                    self.undo_reference(new_data, handle, self.mapbase[key])
+                    self.undo_reference(new_data, handle)
                 else:
-                    self.undo_data(new_data, handle, self.mapbase[key],
+                    self.undo_data(new_data, handle, key,
                                    db.emit, SIGBASE[key])
             self.db.transaction_backend_commit()
         except:
@@ -184,15 +184,22 @@ class DbGenericUndo(DbUndo):
         subitems = transaction.get_recnos(reverse=True)

         # Process all records in the transaction
-        for record_id in subitems:
-            (key, trans_type, handle, old_data, new_data) = \
-                pickle.loads(self.undodb[record_id])
+        try:
+            self.db.transaction_backend_begin()
+            for record_id in subitems:
+                (key, trans_type, handle, old_data, new_data) = \
+                    pickle.loads(self.undodb[record_id])
+                if key == REFERENCE_KEY:
+                    self.undo_reference(old_data, handle)
+                else:
+                    self.undo_data(old_data, handle, key, db.emit, SIGBASE[key])
+            self.db.transaction_backend_commit()
+        except:
+            self.db.transaction_backend_abort()
+            raise

-            if key == REFERENCE_KEY:
-                self.undo_reference(old_data, handle, self.mapbase[key])
-            else:
-                self.undo_data(old_data, handle, self.mapbase[key],
-                               db.emit, SIGBASE[key])
         # Notify listeners
         if db.undo_callback:
             if self.undo_count > 0:
@@ -209,6 +216,43 @@ class DbGenericUndo(DbUndo):
             db.undo_history_callback()

         return True

+    def undo_reference(self, data, handle):
+        """
+        Helper method to undo a reference map entry
+        """
+        if data is None:
+            sql = ("DELETE FROM reference " +
+                   "WHERE obj_handle = ? AND ref_handle = ?")
+            self.db.dbapi.execute(sql, [handle[0], handle[1]])
+        else:
+            sql = ("INSERT INTO reference " +
+                   "(obj_handle, obj_class, ref_handle, ref_class) " +
+                   "VALUES(?, ?, ?, ?)")
+            self.db.dbapi.execute(sql, data)
+
+    def undo_data(self, data, handle, obj_key, emit, signal_root):
+        """
+        Helper method to undo/redo the changes made
+        """
+        cls = KEY_TO_CLASS_MAP[obj_key]
+        table = cls.lower()
+        if data is None:
+            emit(signal_root + '-delete', ([handle],))
+            sql = "DELETE FROM %s WHERE handle = ?" % table
+            self.db.dbapi.execute(sql, [handle])
+        else:
+            if self.db.has_handle(obj_key, handle):
+                signal = signal_root + '-update'
+                sql = "UPDATE %s SET blob_data = ? WHERE handle = ?" % table
+                self.db.dbapi.execute(sql, [pickle.dumps(data), handle])
+            else:
+                signal = signal_root + '-add'
+                sql = "INSERT INTO %s (handle, blob_data) VALUES (?, ?)" % table
+                self.db.dbapi.execute(sql, [handle, pickle.dumps(data)])
+            obj = self.db.get_table_func(cls)["class_func"].create(data)
+            self.db.update_secondary_values(obj)
+            emit(signal, ([handle],))
+
 class Table:
     """
     Implements Table interface.


@@ -5,7 +5,6 @@
 #-------------------------------------------------------------------------
 from abc import ABCMeta, abstractmethod
 import time
-import pickle
 from collections import deque

 class DbUndo(metaclass=ABCMeta):
@@ -14,33 +13,16 @@ class DbUndo(metaclass=ABCMeta):
     for use with a real backend.
     """
-    __slots__ = ('undodb', 'db', 'mapbase', 'undo_history_timestamp',
-                 'txn', 'undoq', 'redoq')
+    __slots__ = ('undodb', 'db', 'undo_history_timestamp', 'undoq', 'redoq')

-    def __init__(self, grampsdb):
+    def __init__(self, db):
         """
         Class constructor. Set up main instance variables
         """
-        self.db = grampsdb
+        self.db = db
         self.undoq = deque()
         self.redoq = deque()
         self.undo_history_timestamp = time.time()
-        self.txn = None
-        # N.B. the databases have to be in the same order as the numbers in
-        # xxx_KEY in gen/db/dbconst.py
-        self.mapbase = (
-                        self.db.person_map,
-                        self.db.family_map,
-                        self.db.source_map,
-                        self.db.event_map,
-                        self.db.media_map,
-                        self.db.place_map,
-                        self.db.repository_map,
-                        None,
-                        self.db.note_map,
-                        self.db.tag_map,
-                        self.db.citation_map,
-                        )

     def clear(self):
         """
@@ -49,7 +31,6 @@ class DbUndo(metaclass=ABCMeta):
         self.undoq.clear()
         self.redoq.clear()
         self.undo_history_timestamp = time.time()
-        self.txn = None

     def __enter__(self, value):
         """
@@ -142,30 +123,5 @@ class DbUndo(metaclass=ABCMeta):
             return False
         return self._redo(update_history)

-    def undo_reference(self, data, handle, db_map):
-        """
-        Helper method to undo a reference map entry
-        """
-        if data is None:
-            db_map.delete(handle)
-        else:
-            db_map[handle] = data
-
-    def undo_data(self, data, handle, db_map, emit, signal_root):
-        """
-        Helper method to undo/redo the changes made
-        """
-        if data is None:
-            emit(signal_root + '-delete', ([handle],))
-            db_map.delete(handle)
-        else:
-            ex_data = db_map[handle]
-            if ex_data:
-                signal = signal_root + '-update'
-            else:
-                signal = signal_root + '-add'
-            db_map[handle] = data
-            emit(signal, ([handle],))
-
     undo_count = property(lambda self:len(self.undoq))
     redo_count = property(lambda self:len(self.redoq))


@@ -41,7 +41,8 @@ from gramps.gen.db.dbconst import (DBLOGNAME, DBBACKEND, KEY_TO_NAME_MAP,
                                    TXNADD, TXNUPD, TXNDEL,
                                    PERSON_KEY, FAMILY_KEY, SOURCE_KEY,
                                    EVENT_KEY, MEDIA_KEY, PLACE_KEY, NOTE_KEY,
-                                   TAG_KEY, CITATION_KEY, REPOSITORY_KEY)
+                                   TAG_KEY, CITATION_KEY, REPOSITORY_KEY,
+                                   REFERENCE_KEY)
 from gramps.gen.db.generic import DbGeneric
 from gramps.gen.lib import (Tag, Media, Person, Family, Source,
                             Citation, Event, Place, Repository, Note)
@@ -370,6 +371,8 @@ class DBAPI(DbGeneric):
         if not txn.batch:
             # Now, emit signals:
             for (obj_type_val, txn_type_val) in list(txn):
+                if obj_type_val == REFERENCE_KEY:
+                    continue
                 if txn_type_val == TXNDEL:
                     handles = [handle for (handle, data) in
                                txn[(obj_type_val, txn_type_val)]]
@@ -721,7 +724,7 @@ class DBAPI(DbGeneric):
                                 pickle.dumps(obj.serialize())])
         self.update_secondary_values(obj)
         if not trans.batch:
-            self.update_backlinks(obj)
+            self.update_backlinks(obj, trans)
             if old_data:
                 trans.add(obj_key, TXNUPD, obj.handle,
                           old_data,
@@ -941,21 +944,48 @@ class DBAPI(DbGeneric):
                 [str(attr.type) for attr in media.attribute_list
                  if attr.type.is_custom() and str(attr.type)])

-    def update_backlinks(self, obj):
-        # First, delete the current references:
-        self.dbapi.execute("DELETE FROM reference WHERE obj_handle = ?;",
+    def update_backlinks(self, obj, transaction):
+        # Find existing references
+        sql = ("SELECT ref_class, ref_handle " +
+               "FROM reference WHERE obj_handle = ?")
+        self.dbapi.execute(sql, [obj.handle])
+        existing_references = set(self.dbapi.fetchall())
+
+        # Once we have the list of rows that already have a reference
+        # we need to compare it with the list of objects that are
+        # still references from the primary object.
+        current_references = set(obj.get_referenced_handles_recursively())
+        no_longer_required_references = existing_references.difference(
+            current_references)
+        new_references = current_references.difference(existing_references)
+
+        # Delete the existing references
+        self.dbapi.execute("DELETE FROM reference WHERE obj_handle = ?",
                            [obj.handle])
-        # Now, add the current ones:
-        references = set(obj.get_referenced_handles_recursively())
-        for (ref_class_name, ref_handle) in references:
-            self.dbapi.execute("""INSERT INTO reference
-                     (obj_handle, obj_class, ref_handle, ref_class)
-                     VALUES(?, ?, ?, ?);""",
-                               [obj.handle,
-                                obj.__class__.__name__,
-                                ref_handle,
-                                ref_class_name])
-        # This function is followed by a commit.
+
+        # Now, add the current ones
+        for (ref_class_name, ref_handle) in current_references:
+            sql = ("INSERT INTO reference " +
+                   "(obj_handle, obj_class, ref_handle, ref_class)" +
+                   "VALUES(?, ?, ?, ?)")
+            self.dbapi.execute(sql, [obj.handle, obj.__class__.__name__,
+                                     ref_handle, ref_class_name])
+
+        if not transaction.batch:
+            # Add new references to the transaction
+            for (ref_class_name, ref_handle) in new_references:
+                key = (obj.handle, ref_handle)
+                data = (obj.handle, obj.__class__.__name__,
+                        ref_handle, ref_class_name)
+                transaction.add(REFERENCE_KEY, TXNADD, key, None, data)
+            # Add old references to the transaction
+            for (ref_class_name, ref_handle) in no_longer_required_references:
+                key = (obj.handle, ref_handle)
+                old_data = (obj.handle, obj.__class__.__name__,
+                            ref_handle, ref_class_name)
+                transaction.add(REFERENCE_KEY, TXNDEL, key, old_data, None)

     def _do_remove(self, handle, transaction, obj_key):
         if isinstance(handle, bytes):
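
The backlink bookkeeping in the new update_backlinks() boils down to two set differences between the rows already stored in the reference table and the references the edited object still holds; a standalone sketch with made-up handles:

    # Rows already stored for this object, as (ref_class, ref_handle) pairs
    existing_references = {('Event', 'e0001'), ('Note', 'n0001')}
    # References the object still holds after the edit
    current_references = {('Event', 'e0001'), ('Place', 'p0001')}

    new_references = current_references - existing_references                 # recorded as TXNADD
    no_longer_required_references = existing_references - current_references  # recorded as TXNDEL

    assert new_references == {('Place', 'p0001')}
    assert no_longer_required_references == {('Note', 'n0001')}

Recording both sets in the transaction is what allows undo and redo to restore the reference table exactly, while the signal-emission loop in dbapi.py skips REFERENCE_KEY records, which have no object signal of their own.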