From 6975e3086149536384bce52eddf108e882d4ae45 Mon Sep 17 00:00:00 2001 From: Paul Franklin Date: Fri, 15 Jul 2016 11:53:24 -0700 Subject: [PATCH] increase pylint score from 7.17 to 9.02 --- gramps/plugins/db/dbapi/dbapi.py | 736 +++++++++++++++++++++---------- 1 file changed, 510 insertions(+), 226 deletions(-) diff --git a/gramps/plugins/db/dbapi/dbapi.py b/gramps/plugins/db/dbapi/dbapi.py index 73d664787..d2ee4cc1a 100644 --- a/gramps/plugins/db/dbapi/dbapi.py +++ b/gramps/plugins/db/dbapi/dbapi.py @@ -23,6 +23,8 @@ # Standard python modules # #------------------------------------------------------------------------- +import os +import shutil import time import sys import pickle @@ -34,9 +36,16 @@ import logging # Gramps Modules # #------------------------------------------------------------------------ -from gramps.gen.db.generic import * -from gramps.gen.db.dbconst import DBLOGNAME -from gramps.gen.config import config +from gramps.gen.db.base import eval_order_by +from gramps.gen.db.dbconst import (DBLOGNAME, DBBACKEND, KEY_TO_NAME_MAP, + TXNADD, TXNUPD, TXNDEL, + PERSON_KEY, FAMILY_KEY, SOURCE_KEY, + EVENT_KEY, MEDIA_KEY, PLACE_KEY, NOTE_KEY, + TAG_KEY, CITATION_KEY, REPOSITORY_KEY) +from gramps.gen.db.generic import DbGeneric +from gramps.gen.lib import (Tag, Media, Person, Family, Source, + Citation, Event, Place, Repository, Note) +from gramps.gen.lib.genderstats import GenderStats from gramps.gen.const import GRAMPS_LOCALE as glocale _ = glocale.translation.gettext @@ -49,6 +58,9 @@ class DBAPI(DbGeneric): """ @classmethod def get_class_summary(cls): + """ + Return a dictionary of information about this database. 
+ """ summary = { "DB-API version": "2.0", "Database type": cls.__name__, @@ -100,21 +112,21 @@ class DBAPI(DbGeneric): def write_version(self, directory): """Write files for a newly created DB.""" versionpath = os.path.join(directory, "bdbversion.txt") - _LOG.debug("Write bsddb version %s" % str(self.VERSION)) + _LOG.debug("Write bsddb version %s", str(self.VERSION)) with open(versionpath, "w") as version_file: version_file.write(str(self.VERSION)) versionpath = os.path.join(directory, "pythonversion.txt") - _LOG.debug("Write python version file to %s" % str(sys.version_info[0])) + _LOG.debug("Write python version file to %s", str(sys.version_info[0])) with open(versionpath, "w") as version_file: version_file.write(str(sys.version_info[0])) versionpath = os.path.join(directory, "pickleupgrade.txt") - _LOG.debug("Write pickle version file to %s" % "Yes") + _LOG.debug("Write pickle version file to %s", "Yes") with open(versionpath, "w") as version_file: version_file.write("YES") - _LOG.debug("Write schema version file to %s" % str(self.VERSION[0])) + _LOG.debug("Write schema version file to %s", str(self.VERSION[0])) versionpath = os.path.join(directory, "schemaversion.txt") with open(versionpath, "w") as version_file: version_file.write(str(self.VERSION[0])) @@ -137,22 +149,22 @@ class DBAPI(DbGeneric): # Run code from directory from gramps.gen.utils.configmanager import ConfigManager config_file = os.path.join(directory, 'settings.ini') - config = ConfigManager(config_file) - config.register('database.dbtype', 'sqlite') - config.register('database.dbname', 'gramps') - config.register('database.host', 'localhost') - config.register('database.user', 'user') - config.register('database.password', 'password') - config.register('database.port', 'port') - config.load() # load from settings.ini + config_mgr = ConfigManager(config_file) + config_mgr.register('database.dbtype', 'sqlite') + config_mgr.register('database.dbname', 'gramps') + 
config_mgr.register('database.host', 'localhost') + config_mgr.register('database.user', 'user') + config_mgr.register('database.password', 'password') + config_mgr.register('database.port', 'port') + config_mgr.load() # load from settings.ini settings = { "__file__": os.path.join(directory, "settings.py"), - "config": config + "config": config_mgr } settings_file = os.path.join(directory, "settings.py") - with open(settings_file) as f: - code = compile(f.read(), settings_file, 'exec') + with open(settings_file) as file: + code = compile(file.read(), settings_file, 'exec') exec(code, globals(), settings) self.dbapi = settings["dbapi"] self.update_schema() @@ -358,7 +370,7 @@ class DBAPI(DbGeneric): Lowlevel interface to the backend transaction. Executes a db BEGIN; """ - _LOG.debug(" DBAPI %s transaction begin" % hex(id(self))) + _LOG.debug(" DBAPI %s transaction begin", hex(id(self))) self.dbapi.begin() def transaction_backend_commit(self): @@ -366,8 +378,7 @@ class DBAPI(DbGeneric): Lowlevel interface to the backend transaction. Executes a db END; """ - _LOG.debug(" DBAPI %s transaction commit" - % hex(id(self))) + _LOG.debug(" DBAPI %s transaction commit", hex(id(self))) self.dbapi.commit() def transaction_backend_abort(self): @@ -381,9 +392,9 @@ class DBAPI(DbGeneric): """ Transactions are handled automatically by the db layer. """ - _LOG.debug(" %sDBAPI %s transaction begin for '%s'" - % ("Batch " if transaction.batch else "", - hex(id(self)), transaction.get_description())) + _LOG.debug(" %sDBAPI %s transaction begin for '%s'", + "Batch " if transaction.batch else "", + hex(id(self)), transaction.get_description()) self.transaction = transaction self.dbapi.begin() return transaction @@ -392,9 +403,9 @@ class DBAPI(DbGeneric): """ Executed at the end of a transaction. 
""" - _LOG.debug(" %sDBAPI %s transaction commit for '%s'" - % ("Batch " if txn.batch else "", hex(id(self)), - txn.get_description())) + _LOG.debug(" %sDBAPI %s transaction commit for '%s'", + "Batch " if txn.batch else "", + hex(id(self)), txn.get_description()) action = {TXNADD: "-add", TXNUPD: "-update", @@ -414,9 +425,11 @@ class DBAPI(DbGeneric): else: handles = [handle for (handle, data) in txn[(obj_type_val, txn_type_val)] - if (handle, None) not in txn[(obj_type_val, TXNDEL)]] + if (handle, None) + not in txn[(obj_type_val, TXNDEL)]] if handles: - signal = KEY_TO_NAME_MAP[obj_type_val] + action[txn_type_val] + signal = KEY_TO_NAME_MAP[ + obj_type_val] + action[txn_type_val] self.emit(signal, (handles, )) self.transaction = None msg = txn.get_description() @@ -440,7 +453,8 @@ class DBAPI(DbGeneric): """ Get an item from the database. """ - self.dbapi.execute("SELECT value FROM metadata WHERE setting = ?;", [key]) + self.dbapi.execute( + "SELECT value FROM metadata WHERE setting = ?;", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -457,20 +471,28 @@ class DBAPI(DbGeneric): self.dbapi.execute("SELECT 1 FROM metadata WHERE setting = ?;", [key]) row = self.dbapi.fetchone() if row: - self.dbapi.execute("UPDATE metadata SET value = ? WHERE setting = ?;", - [pickle.dumps(value), key]) + self.dbapi.execute( + "UPDATE metadata SET value = ? WHERE setting = ?;", + [pickle.dumps(value), key]) else: - self.dbapi.execute("INSERT INTO metadata (setting, value) VALUES (?, ?);", - [key, pickle.dumps(value)]) + self.dbapi.execute( + "INSERT INTO metadata (setting, value) VALUES (?, ?);", + [key, pickle.dumps(value)]) def get_name_group_keys(self): + """ + Return the defined names that have been assigned to a default grouping. 
+ """ self.dbapi.execute("SELECT name FROM name_group ORDER BY name;") rows = self.dbapi.fetchall() return [row[0] for row in rows] def get_name_group_mapping(self, key): - self.dbapi.execute("SELECT grouping FROM name_group WHERE name = ?;", - [key]) + """ + Return the default grouping name for a surname. + """ + self.dbapi.execute( + "SELECT grouping FROM name_group WHERE name = ?;", [key]) row = self.dbapi.fetchone() if row: return row[0] @@ -520,11 +542,21 @@ class DBAPI(DbGeneric): return [bytes(row[0], "utf-8") for row in rows] def get_event_handles(self): + """ + Return a list of database handles, one handle for each Event in the + database. + """ self.dbapi.execute("SELECT handle FROM event;") rows = self.dbapi.fetchall() return [bytes(row[0], "utf-8") for row in rows] def get_citation_handles(self, sort_handles=False): + """ + Return a list of database handles, one handle for each Citation in + the database. + + If sort_handles is True, the list is sorted by Citation title. + """ if sort_handles: self.dbapi.execute("SELECT handle FROM citation ORDER BY order_by;") else: @@ -533,6 +565,12 @@ class DBAPI(DbGeneric): return [bytes(row[0], "utf-8") for row in rows] def get_source_handles(self, sort_handles=False): + """ + Return a list of database handles, one handle for each Source in + the database. + + If sort_handles is True, the list is sorted by Source title. + """ if sort_handles: self.dbapi.execute("SELECT handle FROM source ORDER BY order_by;") else: @@ -541,6 +579,12 @@ class DBAPI(DbGeneric): return [bytes(row[0], "utf-8") for row in rows] def get_place_handles(self, sort_handles=False): + """ + Return a list of database handles, one handle for each Place in + the database. + + If sort_handles is True, the list is sorted by Place title. 
+ """ if sort_handles: self.dbapi.execute("SELECT handle FROM place ORDER BY order_by;") else: @@ -549,11 +593,21 @@ class DBAPI(DbGeneric): return [bytes(row[0], "utf-8") for row in rows] def get_repository_handles(self): + """ + Return a list of database handles, one handle for each Repository in + the database. + """ self.dbapi.execute("SELECT handle FROM repository;") rows = self.dbapi.fetchall() return [bytes(row[0], "utf-8") for row in rows] def get_media_handles(self, sort_handles=False): + """ + Return a list of database handles, one handle for each Media in + the database. + + If sort_handles is True, the list is sorted by title. + """ if sort_handles: self.dbapi.execute("SELECT handle FROM media ORDER BY order_by;") else: @@ -562,11 +616,21 @@ class DBAPI(DbGeneric): return [bytes(row[0], "utf-8") for row in rows] def get_note_handles(self): + """ + Return a list of database handles, one handle for each Note in the + database. + """ self.dbapi.execute("SELECT handle FROM note;") rows = self.dbapi.fetchall() return [bytes(row[0], "utf-8") for row in rows] def get_tag_handles(self, sort_handles=False): + """ + Return a list of database handles, one handle for each Tag in + the database. + + If sort_handles is True, the list is sorted by Tag name. + """ if sort_handles: self.dbapi.execute("SELECT handle FROM tag ORDER BY order_by;") else: @@ -575,94 +639,140 @@ class DBAPI(DbGeneric): return [bytes(row[0], "utf-8") for row in rows] def get_tag_from_name(self, name): + """ + Find a Tag in the database from the passed Tag name. + + If no such Tag exists, None is returned. + """ self.dbapi.execute("""select handle from tag where order_by = ?;""", - [self._order_by_tag_key(name)]) + [self._order_by_tag_key(name)]) row = self.dbapi.fetchone() if row: return self.get_tag_from_handle(row[0]) return None def get_number_of_people(self): + """ + Return the number of people currently in the database. 
+ """ self.dbapi.execute("SELECT count(1) FROM person;") row = self.dbapi.fetchone() return row[0] def get_number_of_events(self): + """ + Return the number of events currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM event;") row = self.dbapi.fetchone() return row[0] def get_number_of_places(self): + """ + Return the number of places currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM place;") row = self.dbapi.fetchone() return row[0] def get_number_of_tags(self): + """ + Return the number of tags currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM tag;") row = self.dbapi.fetchone() return row[0] def get_number_of_families(self): + """ + Return the number of families currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM family;") row = self.dbapi.fetchone() return row[0] def get_number_of_notes(self): + """ + Return the number of notes currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM note;") row = self.dbapi.fetchone() return row[0] def get_number_of_citations(self): + """ + Return the number of citations currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM citation;") row = self.dbapi.fetchone() return row[0] def get_number_of_sources(self): + """ + Return the number of sources currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM source;") row = self.dbapi.fetchone() return row[0] def get_number_of_media(self): + """ + Return the number of media objects currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM media;") row = self.dbapi.fetchone() return row[0] def get_number_of_repositories(self): + """ + Return the number of source repositories currently in the database. + """ self.dbapi.execute("SELECT count(1) FROM repository;") row = self.dbapi.fetchone() return row[0] def has_name_group_key(self, key): + """ + Return if a key exists in the name_group table. 
+ """ self.dbapi.execute("SELECT grouping FROM name_group WHERE name = ?;", - [key]) + [key]) row = self.dbapi.fetchone() return True if row else False def set_name_group_mapping(self, name, grouping): + """ + Set the default grouping name for a surname. + """ self.dbapi.execute("SELECT 1 FROM name_group WHERE name = ?;", - [name]) + [name]) row = self.dbapi.fetchone() if row: self.dbapi.execute("DELETE FROM name_group WHERE name = ?;", - [name]) - self.dbapi.execute("INSERT INTO name_group (name, grouping) VALUES(?, ?);", - [name, grouping]) + [name]) + self.dbapi.execute( + "INSERT INTO name_group (name, grouping) VALUES(?, ?);", + [name, grouping]) def commit_person(self, person, trans, change_time=None): + """ + Commit the specified Person to the database, storing the changes as + part of the transaction. + """ old_person = None person.change = int(change_time or time.time()) if person.handle in self.person_map: old_person = self.get_person_from_handle(person.handle) # Update gender statistics if necessary - if (old_person.gender != person.gender or - old_person.primary_name.first_name != - person.primary_name.first_name): + if (old_person.gender != person.gender + or (old_person.primary_name.first_name != + person.primary_name.first_name)): self.genderStats.uncount_person(old_person) self.genderStats.count_person(person) # Update surname list if necessary if (self._order_by_person_key(person) != - self._order_by_person_key(old_person)): + self._order_by_person_key(old_person)): self.remove_from_surname_list(old_person) self.add_to_surname_list(person, trans.batch) given_name, surname, gender_type = self.get_person_data(person) @@ -686,14 +796,15 @@ class DBAPI(DbGeneric): self.add_to_surname_list(person, trans.batch) given_name, surname, gender_type = self.get_person_data(person) # Insert the person: - self.dbapi.execute("""INSERT INTO person (handle, order_by, gramps_id, blob_data, - given_name, surname, gender_type) - VALUES(?, ?, ?, ?, ?, ?, ?);""", - 
[person.handle, - self._order_by_person_key(person), - person.gramps_id, - pickle.dumps(person.serialize()), - given_name, surname, gender_type]) + self.dbapi.execute( + """INSERT INTO person (handle, order_by, gramps_id, blob_data, + given_name, surname, gender_type) + VALUES(?, ?, ?, ?, ?, ?, ?);""", + [person.handle, + self._order_by_person_key(person), + person.gramps_id, + pickle.dumps(person.serialize()), + given_name, surname, gender_type]) self.update_secondary_values(person) if not trans.batch: self.update_backlinks(person) @@ -723,7 +834,7 @@ class DBAPI(DbGeneric): for asurname in person.alternate_names: all_surn += asurname.get_surname_list() self.origin_types.update([str(surn.origintype) for surn in all_surn - if surn.origintype.is_custom()]) + if surn.origintype.is_custom()]) all_surn = None self.url_types.update([str(url.type) for url in person.urls if url.type.is_custom()]) @@ -734,6 +845,10 @@ class DBAPI(DbGeneric): self.media_attributes.update(attr_list) def commit_family(self, family, trans, change_time=None): + """ + Commit the specified Family to the database, storing the changes as + part of the transaction. 
+ """ old_family = None family.change = int(change_time or time.time()) if family.handle in self.family_map: @@ -749,18 +864,20 @@ class DBAPI(DbGeneric): pickle.dumps(family.serialize()), family.handle]) else: - self.dbapi.execute("""INSERT INTO family (handle, gramps_id, father_handle, mother_handle, blob_data) - VALUES(?, ?, ?, ?, ?);""", - [family.handle, - family.gramps_id, - family.father_handle, - family.mother_handle, - pickle.dumps(family.serialize())]) + self.dbapi.execute( + """INSERT INTO family (handle, gramps_id, + father_handle, mother_handle, blob_data) + VALUES(?, ?, ?, ?, ?);""", + [family.handle, + family.gramps_id, + family.father_handle, + family.mother_handle, + pickle.dumps(family.serialize())]) self.update_secondary_values(family) if not trans.batch: self.update_backlinks(family) - op = TXNUPD if old_family else TXNADD - trans.add(FAMILY_KEY, op, family.handle, + db_op = TXNUPD if old_family else TXNADD + trans.add(FAMILY_KEY, db_op, family.handle, old_family, family.serialize()) @@ -791,10 +908,15 @@ class DBAPI(DbGeneric): self.media_attributes.update(attr_list) def commit_citation(self, citation, trans, change_time=None): + """ + Commit the specified Citation to the database, storing the changes as + part of the transaction. + """ old_citation = None citation.change = int(change_time or time.time()) if citation.handle in self.citation_map: - old_citation = self.get_citation_from_handle(citation.handle).serialize() + old_citation = self.get_citation_from_handle( + citation.handle).serialize() self.dbapi.execute("""UPDATE citation SET gramps_id = ?, order_by = ?, blob_data = ? 
@@ -804,17 +926,18 @@ class DBAPI(DbGeneric): pickle.dumps(citation.serialize()), citation.handle]) else: - self.dbapi.execute("""INSERT INTO citation (handle, order_by, gramps_id, blob_data) - VALUES(?, ?, ?, ?);""", - [citation.handle, - self._order_by_citation_key(citation), - citation.gramps_id, - pickle.dumps(citation.serialize())]) + self.dbapi.execute( + """INSERT INTO citation (handle, order_by, gramps_id, blob_data) + VALUES(?, ?, ?, ?);""", + [citation.handle, + self._order_by_citation_key(citation), + citation.gramps_id, + pickle.dumps(citation.serialize())]) self.update_secondary_values(citation) if not trans.batch: self.update_backlinks(citation) - op = TXNUPD if old_citation else TXNADD - trans.add(CITATION_KEY, op, citation.handle, + db_op = TXNUPD if old_citation else TXNADD + trans.add(CITATION_KEY, db_op, citation.handle, old_citation, citation.serialize()) # Misc updates: @@ -829,6 +952,10 @@ class DBAPI(DbGeneric): if attr.type.is_custom() and str(attr.type)]) def commit_source(self, source, trans, change_time=None): + """ + Commit the specified Source to the database, storing the changes as + part of the transaction. 
+ """ old_source = None source.change = int(change_time or time.time()) if source.handle in self.source_map: @@ -842,17 +969,18 @@ class DBAPI(DbGeneric): pickle.dumps(source.serialize()), source.handle]) else: - self.dbapi.execute("""INSERT INTO source (handle, order_by, gramps_id, blob_data) - VALUES(?, ?, ?, ?);""", - [source.handle, - self._order_by_source_key(source), - source.gramps_id, - pickle.dumps(source.serialize())]) + self.dbapi.execute( + """INSERT INTO source (handle, order_by, gramps_id, blob_data) + VALUES(?, ?, ?, ?);""", + [source.handle, + self._order_by_source_key(source), + source.gramps_id, + pickle.dumps(source.serialize())]) self.update_secondary_values(source) if not trans.batch: self.update_backlinks(source) - op = TXNUPD if old_source else TXNADD - trans.add(SOURCE_KEY, op, source.handle, + db_op = TXNUPD if old_source else TXNADD + trans.add(SOURCE_KEY, db_op, source.handle, old_source, source.serialize()) # Misc updates: @@ -870,10 +998,15 @@ class DBAPI(DbGeneric): if attr.type.is_custom() and str(attr.type)]) def commit_repository(self, repository, trans, change_time=None): + """ + Commit the specified Repository to the database, storing the changes + as part of the transaction. + """ old_repository = None repository.change = int(change_time or time.time()) if repository.handle in self.repository_map: - old_repository = self.get_repository_from_handle(repository.handle).serialize() + old_repository = self.get_repository_from_handle( + repository.handle).serialize() self.dbapi.execute("""UPDATE repository SET gramps_id = ?, blob_data = ? 
WHERE handle = ?;""", @@ -881,14 +1014,16 @@ class DBAPI(DbGeneric): pickle.dumps(repository.serialize()), repository.handle]) else: - self.dbapi.execute("""INSERT INTO repository (handle, gramps_id, blob_data) - VALUES(?, ?, ?);""", - [repository.handle, repository.gramps_id, pickle.dumps(repository.serialize())]) + self.dbapi.execute( + """INSERT INTO repository (handle, gramps_id, blob_data) + VALUES(?, ?, ?);""", + [repository.handle, repository.gramps_id, + pickle.dumps(repository.serialize())]) self.update_secondary_values(repository) if not trans.batch: self.update_backlinks(repository) - op = TXNUPD if old_repository else TXNADD - trans.add(REPOSITORY_KEY, op, repository.handle, + db_op = TXNUPD if old_repository else TXNADD + trans.add(REPOSITORY_KEY, db_op, repository.handle, old_repository, repository.serialize()) # Misc updates: @@ -899,6 +1034,10 @@ class DBAPI(DbGeneric): if url.type.is_custom()]) def commit_note(self, note, trans, change_time=None): + """ + Commit the specified Note to the database, storing the changes as part + of the transaction. 
+ """ old_note = None note.change = int(change_time or time.time()) if note.handle in self.note_map: @@ -910,14 +1049,15 @@ class DBAPI(DbGeneric): pickle.dumps(note.serialize()), note.handle]) else: - self.dbapi.execute("""INSERT INTO note (handle, gramps_id, blob_data) - VALUES(?, ?, ?);""", - [note.handle, note.gramps_id, pickle.dumps(note.serialize())]) + self.dbapi.execute( + """INSERT INTO note (handle, gramps_id, blob_data) + VALUES(?, ?, ?);""", + [note.handle, note.gramps_id, pickle.dumps(note.serialize())]) self.update_secondary_values(note) if not trans.batch: self.update_backlinks(note) - op = TXNUPD if old_note else TXNADD - trans.add(NOTE_KEY, op, note.handle, + db_op = TXNUPD if old_note else TXNADD + trans.add(NOTE_KEY, db_op, note.handle, old_note, note.serialize()) # Misc updates: @@ -925,6 +1065,10 @@ class DBAPI(DbGeneric): self.note_types.add(str(note.type)) def commit_place(self, place, trans, change_time=None): + """ + Commit the specified Place to the database, storing the changes as + part of the transaction. 
+ """ old_place = None place.change = int(change_time or time.time()) if place.handle in self.place_map: @@ -938,17 +1082,18 @@ class DBAPI(DbGeneric): pickle.dumps(place.serialize()), place.handle]) else: - self.dbapi.execute("""INSERT INTO place (handle, order_by, gramps_id, blob_data) - VALUES(?, ?, ?, ?);""", - [place.handle, - self._order_by_place_key(place), - place.gramps_id, - pickle.dumps(place.serialize())]) + self.dbapi.execute( + """INSERT INTO place (handle, order_by, gramps_id, blob_data) + VALUES(?, ?, ?, ?);""", + [place.handle, + self._order_by_place_key(place), + place.gramps_id, + pickle.dumps(place.serialize())]) self.update_secondary_values(place) if not trans.batch: self.update_backlinks(place) - op = TXNUPD if old_place else TXNADD - trans.add(PLACE_KEY, op, place.handle, + db_op = TXNUPD if old_place else TXNADD + trans.add(PLACE_KEY, db_op, place.handle, old_place, place.serialize()) # Misc updates: @@ -965,6 +1110,10 @@ class DBAPI(DbGeneric): self.media_attributes.update(attr_list) def commit_event(self, event, trans, change_time=None): + """ + Commit the specified Event to the database, storing the changes as + part of the transaction. 
+ """ old_event = None event.change = int(change_time or time.time()) if event.handle in self.event_map: @@ -976,16 +1125,17 @@ class DBAPI(DbGeneric): pickle.dumps(event.serialize()), event.handle]) else: - self.dbapi.execute("""INSERT INTO event (handle, gramps_id, blob_data) - VALUES(?, ?, ?);""", - [event.handle, - event.gramps_id, - pickle.dumps(event.serialize())]) + self.dbapi.execute( + """INSERT INTO event (handle, gramps_id, blob_data) + VALUES(?, ?, ?);""", + [event.handle, + event.gramps_id, + pickle.dumps(event.serialize())]) self.update_secondary_values(event) if not trans.batch: self.update_backlinks(event) - op = TXNUPD if old_event else TXNADD - trans.add(EVENT_KEY, op, event.handle, + db_op = TXNUPD if old_event else TXNADD + trans.add(EVENT_KEY, db_op, event.handle, old_event, event.serialize()) # Misc updates: @@ -1001,6 +1151,10 @@ class DBAPI(DbGeneric): self.media_attributes.update(attr_list) def commit_tag(self, tag, trans, change_time=None): + """ + Commit the specified Tag to the database, storing the changes as + part of the transaction. + """ tag.change = int(change_time or time.time()) if tag.handle in self.tag_map: self.dbapi.execute("""UPDATE tag SET blob_data = ?, @@ -1011,14 +1165,18 @@ class DBAPI(DbGeneric): tag.handle]) else: self.dbapi.execute("""INSERT INTO tag (handle, order_by, blob_data) - VALUES(?, ?, ?);""", - [tag.handle, - self._order_by_tag_key(tag.name), - pickle.dumps(tag.serialize())]) + VALUES(?, ?, ?);""", + [tag.handle, + self._order_by_tag_key(tag.name), + pickle.dumps(tag.serialize())]) if not trans.batch: self.update_backlinks(tag) def commit_media(self, media, trans, change_time=None): + """ + Commit the specified Media to the database, storing the changes + as part of the transaction. 
+ """ old_media = None media.change = int(change_time or time.time()) if media.handle in self.media_map: @@ -1032,17 +1190,18 @@ class DBAPI(DbGeneric): pickle.dumps(media.serialize()), media.handle]) else: - self.dbapi.execute("""INSERT INTO media (handle, order_by, gramps_id, blob_data) - VALUES(?, ?, ?, ?);""", - [media.handle, - self._order_by_media_key(media), - media.gramps_id, - pickle.dumps(media.serialize())]) + self.dbapi.execute( + """INSERT INTO media (handle, order_by, gramps_id, blob_data) + VALUES(?, ?, ?, ?);""", + [media.handle, + self._order_by_media_key(media), + media.gramps_id, + pickle.dumps(media.serialize())]) self.update_secondary_values(media) if not trans.batch: self.update_backlinks(media) - op = TXNUPD if old_media else TXNADD - trans.add(MEDIA_KEY, op, media.handle, + db_op = TXNUPD if old_media else TXNADD + trans.add(MEDIA_KEY, db_op, media.handle, old_media, media.serialize()) # Misc updates: @@ -1100,8 +1259,9 @@ class DBAPI(DbGeneric): if self.readonly or not handle: return if handle in data_map: - self.dbapi.execute("DELETE FROM %s WHERE handle = ?;" % key2table[key], - [handle]) + self.dbapi.execute( + "DELETE FROM %s WHERE handle = ?;" % key2table[key], + [handle]) if not transaction.batch: data = data_map[handle] transaction.add(key, TXNDEL, handle, data, None) @@ -1125,14 +1285,18 @@ class DBAPI(DbGeneric): """ if isinstance(handle, bytes): handle = str(handle, "utf-8") - self.dbapi.execute("SELECT obj_class, obj_handle FROM reference WHERE ref_handle = ?;", - [handle]) + self.dbapi.execute( + "SELECT obj_class, obj_handle FROM reference WHERE ref_handle = ?;", + [handle]) rows = self.dbapi.fetchall() for row in rows: if (include_classes is None) or (row[0] in include_classes): yield (row[0], row[1]) def find_initial_person(self): + """ + Returns first person in the database + """ handle = self.get_default_handle() person = None if handle: @@ -1155,25 +1319,32 @@ class DBAPI(DbGeneric): self.dbapi.execute(query) rows = 
self.dbapi.fetchall() for row in rows: - obj = self.get_table_func(class_.__name__,"class_func").create(pickle.loads(row[0])) - # just use values and handle to keep small: - sorted_items.append((eval_order_by(order_by, obj, self), obj.handle)) + obj = self.get_table_func(class_.__name__, + "class_func").create(pickle.loads(row[0])) + # just use values and handle to keep small: + sorted_items.append((eval_order_by(order_by, obj, self), + obj.handle)) # next we sort by fields and direction pos = len(order_by) - 1 for (field, order) in reversed(order_by): # sort the lasts parts first - sorted_items.sort(key=itemgetter(pos), reverse=(order=="DESC")) + sorted_items.sort(key=itemgetter(pos), reverse=(order == "DESC")) pos -= 1 # now we will look them up again: for (order_by_values, handle) in sorted_items: - yield self.get_table_func(class_.__name__,"handle_func")(handle) + yield self.get_table_func(class_.__name__, "handle_func")(handle) def iter_items(self, order_by, class_): + """ + Iterate over items in a class, possibly ordered by + a list of field names and direction ("ASC" or "DESC"). 
+ """ # check if order_by fields are secondary # if so, fine # else, use Python sorts if order_by: secondary_fields = class_.get_secondary_fields() - if not self._check_order_by_fields(class_.__name__, order_by, secondary_fields): + if not self._check_order_by_fields(class_.__name__, + order_by, secondary_fields): for item in self.iter_items_order_by_python(order_by, class_): yield item return @@ -1181,75 +1352,113 @@ class DBAPI(DbGeneric): if order_by is None: query = "SELECT blob_data FROM %s;" % class_.__name__.lower() else: - order_phrases = ["%s %s" % (self._hash_name(class_.__name__, class_.get_field_alias(field)), direction) - for (field, direction) in order_by] - query = "SELECT blob_data FROM %s ORDER BY %s;" % (class_.__name__.lower(), ", ".join(order_phrases)) + order_phrases = [ + "%s %s" % (self._hash_name(class_.__name__, + class_.get_field_alias(field)), + direction) + for (field, direction) in order_by] + query = "SELECT blob_data FROM %s ORDER BY %s;" % ( + class_.__name__.lower(), ", ".join(order_phrases)) self.dbapi.execute(query) rows = self.dbapi.fetchall() for row in rows: yield class_.create(pickle.loads(row[0])) def iter_person_handles(self): + """ + Return an iterator over handles for Persons in the database + """ self.dbapi.execute("SELECT handle FROM person;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_family_handles(self): + """ + Return an iterator over handles for Families in the database + """ self.dbapi.execute("SELECT handle FROM family;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_citation_handles(self): + """ + Return an iterator over database handles, one handle for each Citation + in the database. 
+ """ self.dbapi.execute("SELECT handle FROM citation;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_event_handles(self): + """ + Return an iterator over handles for Events in the database + """ self.dbapi.execute("SELECT handle FROM event;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_media_handles(self): + """ + Return an iterator over handles for Media in the database + """ self.dbapi.execute("SELECT handle FROM media;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_note_handles(self): + """ + Return an iterator over handles for Notes in the database + """ self.dbapi.execute("SELECT handle FROM note;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_place_handles(self): + """ + Return an iterator over handles for Places in the database + """ self.dbapi.execute("SELECT handle FROM place;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_repository_handles(self): + """ + Return an iterator over handles for Repositories in the database + """ self.dbapi.execute("SELECT handle FROM repository;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_source_handles(self): + """ + Return an iterator over handles for Sources in the database + """ self.dbapi.execute("SELECT handle FROM source;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def iter_tag_handles(self): + """ + Return an iterator over handles for Tags in the database + """ self.dbapi.execute("SELECT handle FROM tag;") rows = self.dbapi.fetchall() for row in rows: yield row[0] def reindex_reference_map(self, callback): + """ + Reindex all primary records in the database. + """ callback(4) self.dbapi.execute("DELETE FROM reference;") primary_table = ( @@ -1267,23 +1476,27 @@ class DBAPI(DbGeneric): # Now we use the functions and classes defined above # to loop through each of the primary object tables. 
for cursor_func, class_func in primary_table: - logging.info("Rebuilding %s reference map" % - class_func.__name__) + logging.info("Rebuilding %s reference map", class_func.__name__) with cursor_func() as cursor: for found_handle, val in cursor: obj = class_func.create(val) references = set(obj.get_referenced_handles_recursively()) # handle addition of new references for (ref_class_name, ref_handle) in references: - self.dbapi.execute("""INSERT INTO reference (obj_handle, obj_class, ref_handle, ref_class) - VALUES(?, ?, ?, ?);""", - [obj.handle, - obj.__class__.__name__, - ref_handle, - ref_class_name]) + self.dbapi.execute( + """INSERT INTO reference (obj_handle, obj_class, + ref_handle, ref_class) + VALUES(?, ?, ?, ?);""", + [obj.handle, + obj.__class__.__name__, + ref_handle, + ref_class_name]) callback(5) def rebuild_secondary(self, update): + """ + Rebuild secondary indices + """ # First, expand blob to individual fields: self.rebuild_secondary_fields() # Next, rebuild stats: @@ -1296,8 +1509,9 @@ class DBAPI(DbGeneric): while row: place = Place.create(pickle.loads(row[0])) order_by = self._order_by_place_key(place) - cur2 = self.dbapi.execute("""UPDATE place SET order_by = ? WHERE handle = ?;""", - [order_by, place.handle]) + cur2 = self.dbapi.execute( + """UPDATE place SET order_by = ? WHERE handle = ?;""", + [order_by, place.handle]) row = self.dbapi.fetchone() ## Rebuild person order_by: self.dbapi.execute("""select blob_data from person;""") @@ -1305,8 +1519,9 @@ class DBAPI(DbGeneric): while row: person = Person.create(pickle.loads(row[0])) order_by = self._order_by_person_key(person) - cur2 = self.dbapi.execute("""UPDATE person SET order_by = ? WHERE handle = ?;""", - [order_by, person.handle]) + cur2 = self.dbapi.execute( + """UPDATE person SET order_by = ? 
WHERE handle = ?;""", + [order_by, person.handle]) row = self.dbapi.fetchone() ## Rebuild citation order_by: self.dbapi.execute("""select blob_data from citation;""") @@ -1314,8 +1529,9 @@ class DBAPI(DbGeneric): while row: citation = Citation.create(pickle.loads(row[0])) order_by = self._order_by_citation_key(citation) - cur2 = self.dbapi.execute("""UPDATE citation SET order_by = ? WHERE handle = ?;""", - [order_by, citation.handle]) + cur2 = self.dbapi.execute( + """UPDATE citation SET order_by = ? WHERE handle = ?;""", + [order_by, citation.handle]) row = self.dbapi.fetchone() ## Rebuild source order_by: self.dbapi.execute("""select blob_data from source;""") @@ -1323,8 +1539,9 @@ class DBAPI(DbGeneric): while row: source = Source.create(pickle.loads(row[0])) order_by = self._order_by_source_key(source) - cur2 = self.dbapi.execute("""UPDATE source SET order_by = ? WHERE handle = ?;""", - [order_by, source.handle]) + cur2 = self.dbapi.execute( + """UPDATE source SET order_by = ? WHERE handle = ?;""", + [order_by, source.handle]) row = self.dbapi.fetchone() ## Rebuild tag order_by: self.dbapi.execute("""select blob_data from tag;""") @@ -1332,8 +1549,9 @@ class DBAPI(DbGeneric): while row: tag = Tag.create(pickle.loads(row[0])) order_by = self._order_by_tag_key(tag.name) - cur2 = self.dbapi.execute("""UPDATE tag SET order_by = ? WHERE handle = ?;""", - [order_by, tag.handle]) + cur2 = self.dbapi.execute( + """UPDATE tag SET order_by = ? WHERE handle = ?;""", + [order_by, tag.handle]) row = self.dbapi.fetchone() ## Rebuild media order_by: self.dbapi.execute("""select blob_data from media;""") @@ -1341,8 +1559,9 @@ class DBAPI(DbGeneric): while row: media = Media.create(pickle.loads(row[0])) order_by = self._order_by_media_key(media) - cur2 = self.dbapi.execute("""UPDATE media SET order_by = ? WHERE handle = ?;""", - [order_by, media.handle]) + cur2 = self.dbapi.execute( + """UPDATE media SET order_by = ? 
WHERE handle = ?;""", + [order_by, media.handle]) row = self.dbapi.fetchone() def has_handle_for_person(self, key): @@ -1434,7 +1653,8 @@ class DBAPI(DbGeneric): return self.dbapi.fetchone() != None def has_gramps_id_for_repository(self, key): - self.dbapi.execute("SELECT 1 FROM repository WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT 1 FROM repository WHERE gramps_id = ?", [key]) return self.dbapi.fetchone() != None def has_gramps_id_for_note(self, key): @@ -1489,13 +1709,15 @@ class DBAPI(DbGeneric): def _get_raw_person_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM person WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM person WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) def _get_raw_person_from_id_data(self, key): - self.dbapi.execute("SELECT blob_data FROM person WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM person WHERE gramps_id = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1503,13 +1725,15 @@ class DBAPI(DbGeneric): def _get_raw_family_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM family WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM family WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) def _get_raw_family_from_id_data(self, key): - self.dbapi.execute("SELECT blob_data FROM family WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM family WHERE gramps_id = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1517,13 +1741,15 @@ class DBAPI(DbGeneric): def _get_raw_source_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM source WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM source 
WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) def _get_raw_source_from_id_data(self, key): - self.dbapi.execute("SELECT blob_data FROM source WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM source WHERE gramps_id = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1531,13 +1757,15 @@ class DBAPI(DbGeneric): def _get_raw_citation_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM citation WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM citation WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) def _get_raw_citation_from_id_data(self, key): - self.dbapi.execute("SELECT blob_data FROM citation WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM citation WHERE gramps_id = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1545,13 +1773,15 @@ class DBAPI(DbGeneric): def _get_raw_event_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM event WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM event WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) def _get_raw_event_from_id_data(self, key): - self.dbapi.execute("SELECT blob_data FROM event WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM event WHERE gramps_id = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1559,13 +1789,15 @@ class DBAPI(DbGeneric): def _get_raw_media_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM media WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM media WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) def 
_get_raw_media_from_id_data(self, key): - self.dbapi.execute("SELECT blob_data FROM media WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM media WHERE gramps_id = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1573,13 +1805,15 @@ class DBAPI(DbGeneric): def _get_raw_place_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM place WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM place WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) def _get_raw_place_from_id_data(self, key): - self.dbapi.execute("SELECT blob_data FROM place WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM place WHERE gramps_id = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1587,7 +1821,8 @@ class DBAPI(DbGeneric): def _get_raw_repository_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM repository WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM repository WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1595,7 +1830,8 @@ class DBAPI(DbGeneric): def _get_raw_repository_from_id_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM repository WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM repository WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1603,13 +1839,15 @@ class DBAPI(DbGeneric): def _get_raw_note_data(self, key): if isinstance(key, bytes): key = str(key, "utf-8") - self.dbapi.execute("SELECT blob_data FROM note WHERE handle = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM note WHERE handle = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) def 
_get_raw_note_from_id_data(self, key): - self.dbapi.execute("SELECT blob_data FROM note WHERE gramps_id = ?", [key]) + self.dbapi.execute( + "SELECT blob_data FROM note WHERE gramps_id = ?", [key]) row = self.dbapi.fetchone() if row: return pickle.loads(row[0]) @@ -1653,7 +1891,8 @@ class DBAPI(DbGeneric): Returns a dictionary of {given_name: (male_count, female_count, unknown_count)} """ - self.dbapi.execute("""SELECT given_name, female, male, unknown FROM gender_stats;""") + self.dbapi.execute( + """SELECT given_name, female, male, unknown FROM gender_stats;""") gstats = {} for row in self.dbapi.fetchall(): gstats[row[0]] = (row[1], row[2], row[3]) @@ -1663,12 +1902,17 @@ class DBAPI(DbGeneric): self.dbapi.execute("""DELETE FROM gender_stats;""") for key in gstats.stats: female, male, unknown = gstats.stats[key] - self.dbapi.execute("""INSERT INTO gender_stats(given_name, female, male, unknown) - VALUES(?, ?, ?, ?);""", - [key, female, male, unknown]); + self.dbapi.execute( + """INSERT INTO gender_stats(given_name, female, male, unknown) + VALUES(?, ?, ?, ?);""", + [key, female, male, unknown]) def get_surname_list(self): - self.dbapi.execute("""SELECT DISTINCT surname FROM person ORDER BY surname;""") + """ + Return the list of locale-sorted surnames contained in the database. 
+ """ + self.dbapi.execute( + """SELECT DISTINCT surname FROM person ORDER BY surname;""") surname_list = [] for row in self.dbapi.fetchall(): surname_list.append(row[0]) @@ -1731,37 +1975,48 @@ class DBAPI(DbGeneric): """ LOG.info("Rebuilding secondary fields...") for table in self.get_table_func(): - if not hasattr(self.get_table_func(table,"class_func"), "get_secondary_fields"): + if not hasattr(self.get_table_func(table, "class_func"), + "get_secondary_fields"): continue - # do a select on all; if it works, then it is ok; else, check them all + # do a select on all; if it works, then it is ok; + # else, check them all table_name = table.lower() try: - fields = [self._hash_name(table, field) for (field, ptype) in - self.get_table_func(table,"class_func").get_secondary_fields()] + fields = [self._hash_name(table, field) + for (field, ptype) + in self.get_table_func( + table, "class_func").get_secondary_fields()] if fields: - self.dbapi.execute("select %s from %s limit 1;" % (", ".join(fields), table_name)) + self.dbapi.execute("select %s from %s limit 1;" + % (", ".join(fields), table_name)) # if no error, continue - LOG.info("Table %s is up to date" % table) + LOG.info("Table %s is up to date", table) continue except: pass # got to add missing ones, so continue - LOG.info("Table %s needs rebuilding..." 
% table) + LOG.info("Table %s needs rebuilding...", table) altered = False - for field_pair in self.get_table_func(table,"class_func").get_secondary_fields(): + for field_pair in self.get_table_func( + table, "class_func").get_secondary_fields(): field, python_type = field_pair field = self._hash_name(table, field) sql_type = self._sql_type(python_type) try: # test to see if it exists: - self.dbapi.execute("SELECT %s FROM %s LIMIT 1;" % (field, table_name)) - LOG.info(" Table %s, field %s is up to date" % (table, field)) + self.dbapi.execute("SELECT %s FROM %s LIMIT 1;" + % (field, table_name)) + LOG.info(" Table %s, field %s is up to date", + table, field) except: # if not, let's add it - LOG.info(" Table %s, field %s was added" % (table, field)) - self.dbapi.execute("ALTER TABLE %s ADD COLUMN %s %s;" % (table_name, field, sql_type)) + LOG.info(" Table %s, field %s was added", + table, field) + self.dbapi.execute("ALTER TABLE %s ADD COLUMN %s %s;" + % (table_name, field, sql_type)) altered = True if altered: - LOG.info("Table %s is being committed, rebuilt, and indexed..." % table) + LOG.info("Table %s is being committed, " + "rebuilt, and indexed...", table) self.update_secondary_values_table(table) self.create_secondary_indexes_table(table) @@ -1770,7 +2025,8 @@ class DBAPI(DbGeneric): Create the indexes for the secondary fields. """ for table in self.get_table_func(): - if not hasattr(self.get_table_func(table,"class_func"), "get_index_fields"): + if not hasattr(self.get_table_func(table, "class_func"), + "get_index_fields"): continue self.create_secondary_indexes_table(table) @@ -1779,9 +2035,11 @@ class DBAPI(DbGeneric): Create secondary indexes for just this table. 
""" table_name = table.lower() - for field in self.get_table_func(table,"class_func").get_index_fields(): + for field in self.get_table_func( + table, "class_func").get_index_fields(): field = self._hash_name(table, field) - self.dbapi.try_execute("CREATE INDEX %s_%s ON %s(%s);" % (table, field, table_name, field)) + self.dbapi.try_execute("CREATE INDEX %s_%s ON %s(%s);" + % (table, field, table_name, field)) def update_secondary_values_all(self): """ @@ -1797,18 +2055,20 @@ class DBAPI(DbGeneric): field values. table - "Person", "Place", "Media", etc. """ - if not hasattr(self.get_table_func(table,"class_func"), "get_secondary_fields"): + if not hasattr(self.get_table_func(table, "class_func"), + "get_secondary_fields"): return - for item in self.get_table_func(table,"iter_func")(): + for item in self.get_table_func(table, "iter_func")(): self.update_secondary_values(item) def update_secondary_values(self, item): """ - Given a primary object update its secondary field values in the database. + Given a primary object update its secondary field values + in the database. Does not commit. 
""" table = item.__class__.__name__ - fields = self.get_table_func(table,"class_func").get_secondary_fields() + fields = self.get_table_func(table, "class_func").get_secondary_fields() fields = [field for (field, direction) in fields] sets = [] values = [] @@ -1819,8 +2079,10 @@ class DBAPI(DbGeneric): values.append(value) if len(values) > 0: table_name = table.lower() - self.dbapi.execute("UPDATE %s SET %s where handle = ?;" % (table_name, ", ".join(sets)), - self._sql_cast_list(table, sets, values) + [item.handle]) + self.dbapi.execute("UPDATE %s SET %s where handle = ?;" + % (table_name, ", ".join(sets)), + self._sql_cast_list(table, sets, values) + + [item.handle]) def _sql_cast_list(self, table, fields, values): """ @@ -1852,14 +2114,16 @@ class DBAPI(DbGeneric): if where is None: return "" elif len(where) == 3: - field, op, value = where - return "(%s %s %s)" % (self._hash_name(table, field), op, self._sql_repr(value)) + field, db_op, value = where + return "(%s %s %s)" % (self._hash_name(table, field), + db_op, self._sql_repr(value)) elif where[0] in ["AND", "OR"]: parts = [self._build_where_clause_recursive(table, part) for part in where[1]] return "(%s)" % ((" %s " % where[0]).join(parts)) else: - return "(NOT %s)" % self._build_where_clause_recursive(table, where[1]) + return "(NOT %s)" % self._build_where_clause_recursive(table, + where[1]) def _build_where_clause(self, table, where): """ @@ -1868,7 +2132,7 @@ class DBAPI(DbGeneric): """ parts = self._build_where_clause_recursive(table, where) if parts: - return ("WHERE " + parts) + return "WHERE " + parts else: return "" @@ -1877,8 +2141,9 @@ class DBAPI(DbGeneric): order_by - [(field, "ASC" | "DESC"), ...] 
""" if order_by: - order_clause = ", ".join(["%s %s" % (self._hash_name(table, field), dir) - for (field, dir) in order_by]) + order_clause = ", ".join(["%s %s" % (self._hash_name(table, field), + dir) + for (field, dir) in order_by]) return "ORDER BY " + order_clause else: return "" @@ -1888,11 +2153,13 @@ class DBAPI(DbGeneric): fields - [field, ...] return: "field, field, field" """ - all_available = all([(field in secondary_fields) for field in select_fields]) + all_available = all([(field in secondary_fields) + for field in select_fields]) if all_available: # we can get them without expanding return select_fields else: - return ["blob_data"] # nope, we'll have to expand blob to get all fields + # nope, we'll have to expand blob to get all fields + return ["blob_data"] def _check_order_by_fields(self, table, order_by, secondary_fields): """ @@ -1902,7 +2169,7 @@ class DBAPI(DbGeneric): secondary_fields are hashed. """ if order_by: - for (field, dir) in order_by: + for (field, directory) in order_by: if self._hash_name(table, field) not in secondary_fields: return False return True @@ -1920,16 +2187,17 @@ class DBAPI(DbGeneric): connector, exprs = where if connector in ["AND", "OR"]: for expr in exprs: - value = self._check_where_fields(table, expr, secondary_fields) + value = self._check_where_fields(table, expr, + secondary_fields) if value == False: return False return True else: # "NOT" return self._check_where_fields(table, exprs, secondary_fields) - elif len(where) == 3: # (name, op, value) - (name, op, value) = where + elif len(where) == 3: # (name, db_op, value) + (name, db_op, value) = where # just the ones we need for where - return (self._hash_name(table, name) in secondary_fields) + return self._hash_name(table, name) in secondary_fields def _select(self, table, fields=None, start=0, limit=-1, where=None, order_by=None): @@ -1948,16 +2216,21 @@ class DBAPI(DbGeneric): ["NOT", where] order_by - [[fieldname, "ASC" | "DESC"], ...] 
""" - secondary_fields = ([self._hash_name(table, field) for (field, ptype) in - self.get_table_func(table,"class_func").get_secondary_fields()] + - ["handle"]) # handle is a sql field, but not listed in secondaries + secondary_fields = ([self._hash_name(table, field) + for (field, ptype) + in self.get_table_func( + table, "class_func").get_secondary_fields()] + + ["handle"]) + # handle is a sql field, but not listed in secondaries # If no fields, then we need objects: # Check to see if where matches SQL fields: table_name = table.lower() - if ((not self._check_where_fields(table, where, secondary_fields)) or - (not self._check_order_by_fields(table, order_by, secondary_fields))): + if ((not self._check_where_fields(table, where, secondary_fields)) + or (not self._check_order_by_fields(table, order_by, + secondary_fields))): # If not, then need to do select via Python: - generator = super()._select(table, fields, start, limit, where, order_by) + generator = super()._select(table, fields, start, + limit, where, order_by) for item in generator: yield item return @@ -1973,21 +2246,25 @@ class DBAPI(DbGeneric): else: hashed_fields = [self._hash_name(table, field) for field in fields] fields = hashed_fields - select_fields = self._build_select_fields(table, fields, secondary_fields) + select_fields = self._build_select_fields(table, fields, + secondary_fields) where_clause = self._build_where_clause(table, where) order_clause = self._build_order_clause(table, order_by) if get_count_only: select_fields = ["1"] if start: query = "SELECT %s FROM %s %s %s LIMIT %s, %s " % ( - ", ".join(select_fields), table_name, where_clause, order_clause, start, limit + ", ".join(select_fields), + table_name, where_clause, order_clause, start, limit ) else: query = "SELECT %s FROM %s %s %s LIMIT %s" % ( - ", ".join(select_fields), table_name, where_clause, order_clause, limit + ", ".join(select_fields), + table_name, where_clause, order_clause, limit ) if get_count_only: - 
self.dbapi.execute("SELECT count(1) from (%s) AS temp_select;" % query) + self.dbapi.execute("SELECT count(1) from (%s) AS temp_select;" + % query) rows = self.dbapi.fetchall() yield rows[0][0] return @@ -1999,17 +2276,24 @@ class DBAPI(DbGeneric): data = {} for field in fields: if field in select_fields: - data[field.replace("__", ".")] = row[select_fields.index(field)] + data[field.replace("__", ".") + ] = row[select_fields.index(field)] else: if obj is None: # we need it! create it and cache it: - obj = self.get_table_func(table,"class_func").create(pickle.loads(row[0])) + obj = self.get_table_func(table, + "class_func").create( + pickle.loads(row[0])) # get the field, even if we need to do a join: - # FIXME: possible optimize: do a join in select for this if needed: + # FIXME: possible optimize: + # do a join in select for this if needed: field = field.replace("__", ".") - data[field] = obj.get_field(field, self, ignore_errors=True) + data[field] = obj.get_field(field, self, + ignore_errors=True) yield data else: - obj = self.get_table_func(table,"class_func").create(pickle.loads(row[0])) + obj = self.get_table_func(table, + "class_func").create( + pickle.loads(row[0])) yield obj def get_summary(self):