Merge pull request #113 from sam-m888/useWithOpen

Prefer with to open files
Doug Blank 2016-04-25 21:35:23 -04:00
commit b526a41af1
11 changed files with 296 additions and 352 deletions
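
The pattern applied throughout is the standard context-manager idiom: a with open(...) block closes the file automatically when the block is left, whether normally or through an exception, so the explicit close() calls can be dropped. A minimal before/after sketch of the shape of each change (names such as path_name taken from the first hunk below):

    # Before: the handle stays open if readline() raises
    file = open(path_name, 'r', encoding='utf8')
    name = file.readline().strip()
    file.close()

    # After: the file is closed on normal exit and on error
    with open(path_name, 'r', encoding='utf8') as file:
        name = file.readline().strip()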

View File

@@ -234,9 +234,8 @@ class CLIDbManager(object):
                 except:
                     version = (0, 0, 0)
                 if os.path.isfile(path_name):
-                    file = open(path_name, 'r', encoding='utf8')
-                    name = file.readline().strip()
-                    file.close()
+                    with open(path_name, 'r', encoding='utf8') as file:
+                        name = file.readline().strip()
 
                     (tval, last) = time_val(dirpath)
                     (enable, stock_id) = self.icon_values(dirpath, self.active,
@@ -293,9 +292,8 @@ class CLIDbManager(object):
         name_list = [ name[0] for name in self.current_names ]
         title = find_next_db_name(name_list)
 
-        name_file = open(path_name, "w", encoding='utf8')
-        name_file.write(title)
-        name_file.close()
+        with open(path_name, "w", encoding='utf8') as name_file:
+            name_file.write(title)
 
         if create_db:
             # write the version number into metadata
@@ -409,9 +407,8 @@ class CLIDbManager(object):
             dirpath = os.path.join(dbdir, dpath)
             path_name = os.path.join(dirpath, NAME_FILE)
             if os.path.isfile(path_name):
-                file = open(path_name, 'r', encoding='utf8')
-                name = file.readline().strip()
-                file.close()
+                with open(path_name, 'r', encoding='utf8') as file:
+                    name = file.readline().strip()
                 if re.match("^" + dbname + "$", name):
                     match_list.append((name, dirpath))
         if len(match_list) == 0:
@@ -438,12 +435,10 @@ class CLIDbManager(object):
     Returns old_name, new_name if success, None, None if no success
     """
     try:
-        name_file = open(filepath, "r", encoding='utf8')
-        old_text=name_file.read()
-        name_file.close()
-        name_file = open(filepath, "w", encoding='utf8')
-        name_file.write(new_text)
-        name_file.close()
+        with open(filepath, "r", encoding='utf8') as name_file:
+            old_text=name_file.read()
+        with open(filepath, "w", encoding='utf8') as name_file:
+            name_file.write(new_text)
     except (OSError, IOError) as msg:
         CLIDbManager.ERROR(_("Could not rename Family Tree"),
                            str(msg))
@@ -543,11 +538,10 @@ def find_locker_name(dirpath):
     """
     try:
         fname = os.path.join(dirpath, "lock")
-        ifile = open(fname, 'r', encoding='utf8')
-        username = ifile.read().strip()
-        # feature request 2356: avoid genitive form
-        last = _("Locked by %s") % username
-        ifile.close()
+        with open(fname, 'r', encoding='utf8') as ifile:
+            username = ifile.read().strip()
+            # feature request 2356: avoid genitive form
+            last = _("Locked by %s") % username
     except (OSError, IOError, UnicodeDecodeError):
         last = _("Unknown")
     return last

View File

@@ -203,26 +203,23 @@ class DbState(Callback):
             dirpath = os.path.join(dbdir, dpath)
             path_name = os.path.join(dirpath, "name.txt")
             if os.path.isfile(path_name):
-                file = open(path_name, 'r', encoding='utf8')
-                name = file.readline().strip()
-                file.close()
+                with open(path_name, 'r', encoding='utf8') as file:
+                    name = file.readline().strip()
                 if dbname == name:
                     locked = False
                     locked_by = None
                     backend = None
                     fname = os.path.join(dirpath, "database.txt")
                     if os.path.isfile(fname):
-                        ifile = open(fname, 'r', encoding='utf8')
-                        backend = ifile.read().strip()
-                        ifile.close()
+                        with open(fname, 'r', encoding='utf8') as ifile:
+                            backend = ifile.read().strip()
                     else:
                         backend = "bsddb"
                     try:
                         fname = os.path.join(dirpath, "lock")
-                        ifile = open(fname, 'r', encoding='utf8')
-                        locked_by = ifile.read().strip()
-                        locked = True
-                        ifile.close()
+                        with open(fname, 'r', encoding='utf8') as ifile:
+                            locked_by = ifile.read().strip()
+                            locked = True
                     except (OSError, IOError):
                         pass
                     return (dirpath, locked, locked_by, backend)

View File

@@ -103,9 +103,8 @@ class FilterList(object):
             if os.path.isfile(self.file):
                 parser = make_parser()
                 parser.setContentHandler(FilterParser(self))
-                the_file = open(self.file, 'r', encoding='utf8')
-                parser.parse(the_file)
-                the_file.close()
+                with open(self.file, 'r', encoding='utf8') as the_file:
+                    parser.parse(the_file)
         except (IOError, OSError):
             print("IO/OSError in _filterlist.py")
         except SAXParseException:

View File

@@ -603,9 +603,8 @@ class GVDotDoc(GVDocBase):
         if self._filename[-3:] != ".gv":
             self._filename += ".gv"
 
-        dotfile = open(self._filename, "wb")
-        dotfile.write(self._dot.getvalue())
-        dotfile.close()
+        with open(self._filename, "wb") as dotfile:
+            dotfile.write(self._dot.getvalue())
 
 #-------------------------------------------------------------------------------
 #

View File

@@ -146,31 +146,30 @@ class StyleSheetList(object):
         """
         Saves the current StyleSheet definitions to the associated file.
         """
-        xml_file = open(self.__file, "w")
-        xml_file.write('<?xml version="1.0" encoding="utf-8"?>\n')
-        xml_file.write('<stylelist>\n')
-        for name in sorted(self.map.keys()): # enable diff of archived copies
-            if name == "default":
-                continue
-            sheet = self.map[name]
-            xml_file.write('<sheet name="%s">\n' % escxml(name))
-            for p_name in sheet.get_paragraph_style_names():
-                self.write_paragraph_style(xml_file, sheet, p_name)
-            for t_name in sheet.get_table_style_names():
-                self.write_table_style(xml_file, sheet, t_name)
-            for c_name in sheet.get_cell_style_names():
-                self.write_cell_style(xml_file, sheet, c_name)
-            for g_name in sheet.get_draw_style_names():
-                self.write_graphics_style(xml_file, sheet, g_name)
-            xml_file.write('</sheet>\n')
-        xml_file.write('</stylelist>\n')
-        xml_file.close()
+        with open(self.__file, "w") as xml_file:
+            xml_file.write('<?xml version="1.0" encoding="utf-8"?>\n')
+            xml_file.write('<stylelist>\n')
+            for name in sorted(self.map.keys()): # enable diff of archived copies
+                if name == "default":
+                    continue
+                sheet = self.map[name]
+                xml_file.write('<sheet name="%s">\n' % escxml(name))
+                for p_name in sheet.get_paragraph_style_names():
+                    self.write_paragraph_style(xml_file, sheet, p_name)
+                for t_name in sheet.get_table_style_names():
+                    self.write_table_style(xml_file, sheet, t_name)
+                for c_name in sheet.get_cell_style_names():
+                    self.write_cell_style(xml_file, sheet, c_name)
+                for g_name in sheet.get_draw_style_names():
+                    self.write_graphics_style(xml_file, sheet, g_name)
+                xml_file.write('</sheet>\n')
+            xml_file.write('</stylelist>\n')
 
     def write_paragraph_style(self, xml_file, sheet, p_name):
@@ -275,9 +274,8 @@ class StyleSheetList(object):
             if os.path.isfile(self.__file):
                 parser = make_parser()
                 parser.setContentHandler(SheetParser(self))
-                the_file = open(self.__file)
-                parser.parse(the_file)
-                the_file.close()
+                with open(self.__file) as the_file:
+                    parser.parse(the_file)
         except (IOError, OSError, SAXParseException):
             pass

View File

@@ -458,68 +458,67 @@ class BookList(object):
         """
         Saves the current BookList to the associated file.
         """
-        f = open(self.file, "w")
-        f.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n")
-        f.write('<booklist>\n')
-        for name in sorted(self.bookmap): # enable a diff of archived copies
-            book = self.get_book(name)
-            dbname = book.get_dbname()
-            f.write(' <book name="%s" database="%s">\n' % (name, dbname) )
-            for item in book.get_item_list():
-                f.write(' <item name="%s" trans_name="%s">\n' %
-                        (item.get_name(), item.get_translated_name() ) )
-                options = item.option_class.handler.options_dict
-                for option_name in sorted(options.keys()): # enable a diff
-                    option_value = options[option_name]
-                    if isinstance(option_value, (list, tuple)):
-                        f.write(' <option name="%s" value="" '
-                                'length="%d">\n' % (
-                                    escape(option_name),
-                                    len(options[option_name]) ) )
-                        for list_index in range(len(option_value)):
-                            option_type = type_name(option_value[list_index])
-                            value = escape(str(option_value[list_index]))
-                            value = value.replace('"', '&quot;')
-                            f.write(' <listitem number="%d" type="%s" '
-                                    'value="%s"/>\n' % (
-                                        list_index,
-                                        option_type,
-                                        value ) )
-                        f.write(' </option>\n')
-                    else:
-                        option_type = type_name(option_value)
-                        value = escape(str(option_value))
-                        value = value.replace('"', '&quot;')
-                        f.write(' <option name="%s" type="%s" '
-                                'value="%s"/>\n' % (
-                                    escape(option_name),
-                                    option_type,
-                                    value) )
-                f.write(' <style name="%s"/>\n' % item.get_style_name() )
-                f.write(' </item>\n')
-            if book.get_paper_name():
-                f.write(' <paper name="%s"/>\n' % book.get_paper_name() )
-            if book.get_orientation() is not None: # 0 is legal
-                f.write(' <orientation value="%s"/>\n' %
-                        book.get_orientation() )
-            if book.get_paper_metric() is not None: # 0 is legal
-                f.write(' <metric value="%s"/>\n' % book.get_paper_metric() )
-            if book.get_custom_paper_size():
-                size = book.get_custom_paper_size()
-                f.write(' <size value="%f %f"/>\n' % (size[0], size[1]) )
-            if book.get_margins():
-                for pos in range(len(book.get_margins())):
-                    f.write(' <margin number="%s" value="%f"/>\n' %
-                            (pos, book.get_margin(pos)) )
-            if book.get_format_name():
-                f.write(' <format name="%s"/>\n' % book.get_format_name() )
-            if book.get_output():
-                f.write(' <output name="%s"/>\n' % book.get_output() )
-            f.write(' </book>\n')
-        f.write('</booklist>\n')
-        f.close()
+        with open(self.file, "w") as f:
+            f.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n")
+            f.write('<booklist>\n')
+            for name in sorted(self.bookmap): # enable a diff of archived copies
+                book = self.get_book(name)
+                dbname = book.get_dbname()
+                f.write(' <book name="%s" database="%s">\n' % (name, dbname) )
+                for item in book.get_item_list():
+                    f.write(' <item name="%s" trans_name="%s">\n' %
+                            (item.get_name(), item.get_translated_name() ) )
+                    options = item.option_class.handler.options_dict
+                    for option_name in sorted(options.keys()): # enable a diff
+                        option_value = options[option_name]
+                        if isinstance(option_value, (list, tuple)):
+                            f.write(' <option name="%s" value="" '
+                                    'length="%d">\n' % (
+                                        escape(option_name),
+                                        len(options[option_name]) ) )
+                            for list_index in range(len(option_value)):
+                                option_type = type_name(option_value[list_index])
+                                value = escape(str(option_value[list_index]))
+                                value = value.replace('"', '&quot;')
+                                f.write(' <listitem number="%d" type="%s" '
+                                        'value="%s"/>\n' % (
+                                            list_index,
+                                            option_type,
+                                            value ) )
+                            f.write(' </option>\n')
+                        else:
+                            option_type = type_name(option_value)
+                            value = escape(str(option_value))
+                            value = value.replace('"', '&quot;')
+                            f.write(' <option name="%s" type="%s" '
+                                    'value="%s"/>\n' % (
+                                        escape(option_name),
+                                        option_type,
+                                        value) )
+                    f.write(' <style name="%s"/>\n' % item.get_style_name() )
+                    f.write(' </item>\n')
+                if book.get_paper_name():
+                    f.write(' <paper name="%s"/>\n' % book.get_paper_name() )
+                if book.get_orientation() is not None: # 0 is legal
+                    f.write(' <orientation value="%s"/>\n' %
+                            book.get_orientation() )
+                if book.get_paper_metric() is not None: # 0 is legal
+                    f.write(' <metric value="%s"/>\n' % book.get_paper_metric() )
+                if book.get_custom_paper_size():
+                    size = book.get_custom_paper_size()
+                    f.write(' <size value="%f %f"/>\n' % (size[0], size[1]) )
+                if book.get_margins():
+                    for pos in range(len(book.get_margins())):
+                        f.write(' <margin number="%s" value="%f"/>\n' %
+                                (pos, book.get_margin(pos)) )
+                if book.get_format_name():
+                    f.write(' <format name="%s"/>\n' % book.get_format_name() )
+                if book.get_output():
+                    f.write(' <output name="%s"/>\n' % book.get_output() )
+                f.write(' </book>\n')
+            f.write('</booklist>\n')
 
     def parse(self):
         """

View File

@@ -504,9 +504,8 @@ class OptionListCollection(_options.OptionListCollection):
             if os.path.isfile(self.filename):
                 p = make_parser()
                 p.setContentHandler(OptionParser(self))
-                the_file = open(self.filename, encoding="utf-8")
-                p.parse(the_file)
-                the_file.close()
+                with open(self.filename, encoding="utf-8") as the_file:
+                    p.parse(the_file)
         except (IOError, OSError, SAXParseException):
             pass
@@ -1000,9 +999,8 @@ class DocOptionListCollection(_options.OptionListCollection):
             if os.path.isfile(self.filename):
                 p = make_parser()
                 p.setContentHandler(DocOptionParser(self))
-                the_file = open(self.filename, encoding="utf-8")
-                p.parse(the_file)
-                the_file.close()
+                with open(self.filename, encoding="utf-8") as the_file:
+                    p.parse(the_file)
         except (IOError, OSError, SAXParseException):
             pass

View File

@@ -279,9 +279,8 @@ def resize_to_jpeg_buffer(source, size, crop=None):
     scaled = img.scale_simple(int(size[0]), int(size[1]), GdkPixbuf.InterpType.BILINEAR)
     os.close(filed)
     scaled.savev(dest, "jpeg", "", "")
-    ofile = open(dest, mode='rb')
-    data = ofile.read()
-    ofile.close()
+    with open(dest, mode='rb') as ofile:
+        data = ofile.read()
     try:
         os.unlink(dest)
     except:

View File

@@ -285,10 +285,8 @@ class HtmlDoc(BaseDoc, TextDoc):
         Copy support files to the datadir that needs to hold them
         """
         #css of textdoc styles
-        tdfile = open(os.path.join(self._backend.datadirfull(),
-                                   _TEXTDOCSCREEN), 'w')
-        tdfile.write(self.style_declaration)
-        tdfile.close()
+        with open(os.path.join(self._backend.datadirfull(), _TEXTDOCSCREEN), 'w') as tdfile:
+            tdfile.write(self.style_declaration)
         #css file
         if self.css_filename:
             #we do an extra check in case file does not exist, eg cli call

View File

@@ -30,12 +30,11 @@ from test import test_util
 test_util.path_append_parent()
 
 def get_potfile(filename):
-    fp = open(filename, "r")
-    retvals = []
-    for line in fp:
-        if line and line[0] != "#":
-            retvals.append(line.strip())
-    fp.close()
+    with open(filename, "r") as fp:
+        retvals = []
+        for line in fp:
+            if line and line[0] != "#":
+                retvals.append(line.strip())
     return retvals
 
 # POTFILES.skip
@@ -55,9 +54,8 @@ class TestPOT(unittest.TestCase):
             realpath = (dir + "/" + file)
             pathfile = realpath[3:]
             if os.path.exists(realpath):
-                fp = open(realpath, "r")
-                lines = fp.read()
-                fp.close()
+                with open(realpath, "r") as fp:
+                    lines = fp.read()
                 found = False
                 for search in searches:
                     if search in lines:
@@ -88,9 +86,8 @@ class TestMake(unittest.TestCase):
         if pathfile[3:] in excluded_files:
             self.assertTrue(True, "exclude '%s'" % pathfile)
         elif os.path.exists(makefile):
-            fp = open(makefile, "r")
-            lines = fp.read()
-            fp.close()
+            with open(makefile, "r") as fp:
+                lines = fp.read()
             self.assertTrue(filename in lines, "'%s' not in %s/Makefile.in" %
                             (filename, path))
         else:
@@ -107,9 +104,8 @@ class TestGetText(unittest.TestCase):
     def helper(self, pofile, searches):
         if not os.path.exists("../../" + pofile):
             self.assertTrue(False, "'%s' is in POTFILES.in and does not exist" % pofile)
-        fp = open("../../" + pofile, "r")
-        lines = fp.read()
-        fp.close()
+        with open("../../" + pofile, "r") as fp:
+            lines = fp.read()
         found = False
         for search in searches:
             found = (search in lines) or found

View File

@@ -113,7 +113,6 @@ def tests():
         print ('Please, install %(program)s for listing groups of messages'
                % {'program': msgattribCmd})
 
     try:
-
         print("\n===='xgettext' =(generate a new template)==============\n")
         os.system('''%(program)s -V''' % {'program': xgettextCmd})
@@ -127,12 +126,10 @@
     except:
         print ('Please, install python')
 
 def TipsParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'tips.xml'.
     """
-
     from xml.etree import ElementTree
-
     tree = ElementTree.parse(filename)
@@ -166,23 +163,22 @@ def TipsParse(filename, mark):
         "Editor."
     '''
 
-    tips = open('../data/tips.xml.in.h', 'w')
-    marklist = root.iter(mark)
-    for key in marklist:
-        tip = ElementTree.tostring(key, encoding="UTF-8", method="xml")
-        if sys.version_info[0] < 3:
-            tip = tip.replace("<?xml version='1.0' encoding='UTF-8'?>", "")
-            tip = tip.replace('\n<_tip number="%(number)s">' % key.attrib, "")
-        else: # python3 support
-            tip = tip.decode("utf-8")
-            tip = tip.replace('<_tip number="%(number)s">' % key.attrib, "")
-        tip = tip.replace("<br />", "<br/>")
-        #tip = tip.replace("\n</_tip>\n", "</_tip>\n") # special case tip 7
-        #tip = tip.replace("\n<b>", "<b>") # special case tip 18
-        tip = tip.replace("</_tip>\n\n", "")
-        tip = tip.replace('"', '&quot;')
-        tips.write('char *s = N_("%s");\n' % tip)
-    tips.close()
+    with open('../data/tips.xml.in.h', 'w') as tips:
+        marklist = root.iter(mark)
+        for key in marklist:
+            tip = ElementTree.tostring(key, encoding="UTF-8", method="xml")
+            if sys.version_info[0] < 3:
+                tip = tip.replace("<?xml version='1.0' encoding='UTF-8'?>", "")
+                tip = tip.replace('\n<_tip number="%(number)s">' % key.attrib, "")
+            else: # python3 support
+                tip = tip.decode("utf-8")
+                tip = tip.replace('<_tip number="%(number)s">' % key.attrib, "")
+            tip = tip.replace("<br />", "<br/>")
+            #tip = tip.replace("\n</_tip>\n", "</_tip>\n") # special case tip 7
+            #tip = tip.replace("\n<b>", "<b>") # special case tip 18
+            tip = tip.replace("</_tip>\n\n", "")
+            tip = tip.replace('"', '&quot;')
+            tips.write('char *s = N_("%s");\n' % tip)
     print ('Wrote ../data/tips.xml.in.h')
     root.clear()
@@ -190,7 +186,6 @@ def HolidaysParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'holidays.xml'.
     """
-
     from xml.etree import ElementTree
 
     tree = ElementTree.parse(filename)
@@ -214,16 +209,14 @@
     msgid "Jewish Holidays"
     msgid "Yom Kippur"
     '''
-
-    holidays = open('../data/holidays.xml.in.h', 'w')
-    for key in ellist:
-        if key.attrib.get(mark):
-            line = key.attrib
-            string = line.items
-            # mapping via the line dict (_name is the key)
-            name = 'char *s = N_("%(_name)s");\n' % line
-            holidays.write(name)
-    holidays.close()
+    with open('../data/holidays.xml.in.h', 'w') as holidays:
+        for key in ellist:
+            if key.attrib.get(mark):
+                line = key.attrib
+                string = line.items
+                # mapping via the line dict (_name is the key)
+                name = 'char *s = N_("%(_name)s");\n' % line
+                holidays.write(name)
 
     print ('Wrote ../data/holidays.xml.in.h')
     root.clear()
@@ -232,7 +225,6 @@ def XmlParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'file.xml.in'.
     """
-
     from xml.etree import ElementTree
 
     tree = ElementTree.parse(filename)
@@ -262,24 +254,21 @@
         </p>
     '''
 
-    head = open(filename + '.h', 'w')
-
-    for key in root.iter():
-        if key.tag == '{http://www.freedesktop.org/standards/shared-mime-info}%s' % mark:
-            comment = 'char *s = N_("%s");\n' % key.text
-            head.write(comment)
-
-    if root.tag == 'application':
-        for key in root.iter():
-            if key.tag == mark:
-                comment = 'char *s = N_("%s");\n' % key.text
-                head.write(comment)
-    head.close()
+    with open(filename + '.h', 'w') as head:
+        for key in root.iter():
+            if key.tag == '{http://www.freedesktop.org/standards/shared-mime-info}%s' % mark:
+                comment = 'char *s = N_("%s");\n' % key.text
+                head.write(comment)
+        if root.tag == 'application':
+            for key in root.iter():
+                if key.tag == mark:
+                    comment = 'char *s = N_("%s");\n' % key.text
+                    head.write(comment)
 
     print ('Wrote %s' % filename)
     root.clear()
 
 def DesktopParse(filename):
     """
     Experimental alternative to 'intltool-extract' for 'gramps.desktop'.
@@ -300,24 +289,20 @@ def DesktopParse(filename):
         "Manage genealogical information,
          perform genealogical research and analysis"
     '''
-
-    desktop = open('../data/gramps.desktop.in.h', 'w')
-
-    f = open(filename)
-    lines = [file.strip() for file in f]
-    f.close()
-
-    for line in lines:
-        if line[0] == '_':
-            for i in range(len(line)):
-                if line[i] == '=':
-                    val = 'char *s = N_("%s");\n' % line[i+1:len(line)]
-                    desktop.write(val)
-    desktop.close()
+    with open('../data/gramps.desktop.in.h', 'w') as desktop:
+        with open(filename) as f:
+            lines = [file.strip() for file in f]
+        for line in lines:
+            if line[0] == '_':
+                for i in range(len(line)):
+                    if line[i] == '=':
+                        val = 'char *s = N_("%s");\n' % line[i+1:len(line)]
+                        desktop.write(val)
 
     print ('Wrote ../data/gramps.desktop.in.h')
 
 def KeyParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'gramps.keys'.
@@ -342,30 +327,26 @@ def KeyParse(filename, mark):
     msgid "Gramps XML database"
     msgid "GEDCOM"
     '''
-
-    key = open('../data/gramps.keys.in.h', 'w')
-
-    f = open(filename)
-    lines = [file for file in f]
-    f.close()
-
-    temp = []
-
-    for line in lines:
-        for i in range(len(line)):
-            if line[i:i+12] == mark:
-                temp.append(line.strip())
-
-    for t in temp:
-        for i in range(len(t)):
-            if t[i] == '=':
-                val = 'char *s = N_("%s");\n' % t[i+1:len(t)]
-                key.write(val)
-    key.close()
+    with open('../data/gramps.keys.in.h', 'w') as key:
+        with open(filename) as f:
+            lines = [file for file in f]
+        temp = []
+
+        for line in lines:
+            for i in range(len(line)):
+                if line[i:i+12] == mark:
+                    temp.append(line.strip())
+
+        for t in temp:
+            for i in range(len(t)):
+                if t[i] == '=':
+                    val = 'char *s = N_("%s");\n' % t[i+1:len(t)]
+                    key.write(val)
 
     print ('Wrote ../data/gramps.keys.in.h')
 
 def main():
     """
     The utility for handling translation stuff.
@@ -406,7 +387,7 @@ def main():
                         choices=LANG,
                         help="check lang.po files")
 
     # testing stage
     trans = parser.add_argument_group('Translation', 'Display content of translations file')
 
     # need one argument (eg, de.po)
@@ -418,7 +399,6 @@
                         choices=[file for file in os.listdir('.') if file.endswith('.po')],
                         help="list fuzzy messages")
 
     args = parser.parse_args()
-
     namespace, extra = parser.parse_known_args()
@@ -467,14 +447,13 @@ def create_filesfile():
     dir = os.getcwd()
     topdir = os.path.normpath(os.path.join(dir, '..', 'gramps'))
     lentopdir = len(topdir)
-    f = open('POTFILES.in')
-    infiles = dict(['../' + file.strip(), None] for file in f if file.strip()
-                   and not file[0]=='#')
-    f.close()
+    with open('POTFILES.in') as f:
+        infiles = dict(['../' + file.strip(), None] for file in f if file.strip()
+                       and not file[0]=='#')
 
-    f = open('POTFILES.skip')
-    notinfiles = dict(['../' + file.strip(), None] for file in f if file
-                      and not file[0]=='#')
-    f.close()
+    with open('POTFILES.skip') as f:
+        notinfiles = dict(['../' + file.strip(), None] for file in f if file
+                          and not file[0]=='#')
 
     for (dirpath, dirnames, filenames) in os.walk(topdir):
         root, subdir = os.path.split(dirpath)
@@ -499,11 +478,10 @@ def create_filesfile():
             if full_filename[lentopdir:] in notinfiles:
                 infiles['../gramps' + full_filename[lentopdir:]] = None
     #now we write out all the files in form ../gramps/filename
-    f = open('tmpfiles', 'w')
-    for file in sorted(infiles.keys()):
-        f.write(file)
-        f.write('\n')
-    f.close()
+    with open('tmpfiles', 'w') as f:
+        for file in sorted(infiles.keys()):
+            f.write(file)
+            f.write('\n')
 
 def listing(name, extensionlist):
     """
@@ -512,20 +490,16 @@ def listing(name, extensionlist):
     Like POTFILES.in and POTFILES.skip
     """
-    f = open('tmpfiles')
-    files = [file.strip() for file in f if file and not file[0]=='#']
-    f.close()
-
-    temp = open(name, 'w')
-
-    for entry in files:
-        for ext in extensionlist:
-            if entry.endswith(ext):
-                temp.write(entry)
-                temp.write('\n')
-                break
-    temp.close()
+    with open('tmpfiles') as f:
+        files = [file.strip() for file in f if file and not file[0]=='#']
+
+    with open(name, 'w') as temp:
+        for entry in files:
+            for ext in extensionlist:
+                if entry.endswith(ext):
+                    temp.write(entry)
+                    temp.write('\n')
+                    break
 
 def headers():
     """
@@ -558,7 +532,6 @@ def extract_xml():
     Extract translation strings from XML based, keys, mime and desktop
     files. Own XML files parsing and custom translation marks.
     """
-
     HolidaysParse('../data/holidays.xml.in', '_name')
     TipsParse('../data/tips.xml.in', '_tip')
     XmlParse('../data/gramps.xml.in', '_comment')
@@ -570,8 +543,8 @@ def create_template():
     """
     Create a new file for template, if it does not exist.
     """
-    template = open('gramps.pot', 'w')
-    template.close()
+    with open('gramps.pot', 'w') as template:
+        pass
 
 def extract_glade():
     """
@@ -624,19 +597,17 @@ def extract_gtkbuilder():
     '''
     files = ['../gramps/plugins/importer/importgedcom.glade', '../gramps/gui/glade/rule.glade']
 
-    temp = open('gtklist.h', 'w')
-
-    for filename in files:
-        tree = ElementTree.parse(filename)
-        root = tree.getroot()
-        for line in root.iter():
-            att = line.attrib
-            if att == {'id': '0', 'translatable': 'yes'}:
-                col = 'char *s = N_("%s");\n' % line.text
-                temp.write(col)
-        root.clear()
-    temp.close()
+    with open('gtklist.h', 'w') as temp:
+        for filename in files:
+            tree = ElementTree.parse(filename)
+            root = tree.getroot()
+            for line in root.iter():
+                att = line.attrib
+                if att == {'id': '0', 'translatable': 'yes'}:
+                    col = 'char *s = N_("%s");\n' % line.text
+                    temp.write(col)
+            root.clear()
 
     print ('Wrote gtklist.h')
 
 def retrieve():
@@ -696,7 +667,6 @@ def merge(args):
     """
     Merge messages with 'gramps.pot'
    """
-
    for arg in args:
        if arg == 'all':
            continue
@@ -709,7 +679,6 @@ def check(args):
     """
     Check the translation file
     """
-
     for arg in args:
         if arg == 'all':
             continue
@@ -724,14 +693,12 @@ def untranslated(arg):
     """
     List untranslated messages
     """
-
     os.system('''%(msgattrib)s --untranslated %(lang.po)s''' % {'msgattrib': msgattribCmd, 'lang.po': arg[0]})
 
 def fuzzy(arg):
     """
     List fuzzy messages
     """
-
     os.system('''%(msgattrib)s --only-fuzzy --no-obsolete %(lang.po)s''' % {'msgattrib': msgattribCmd, 'lang.po': arg[0]})
 
 if __name__ == "__main__":