Prefer with context manager to open files
commit 5dc5615bfd
parent f093c8bd79
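Every hunk below applies the same idiom: an explicit open()/close() pair becomes a with statement, so the file is closed when the block exits, even if an exception is raised before the close() call would have run. A minimal sketch of the before/after pattern (the file name here is hypothetical, not taken from the diff):

    # before: close() is skipped if readline() raises
    name_file = open("name.txt", "r", encoding="utf8")
    name = name_file.readline().strip()
    name_file.close()

    # after: the context manager closes the file on normal exit and on error
    with open("name.txt", "r", encoding="utf8") as name_file:
        name = name_file.readline().strip()

The with form is equivalent to wrapping the reads in try/finally with close() in the finally clause.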
@@ -234,9 +234,8 @@ class CLIDbManager(object):
             except:
                 version = (0, 0, 0)
             if os.path.isfile(path_name):
-                file = open(path_name, 'r', encoding='utf8')
+                with open(path_name, 'r', encoding='utf8') as file:
                 name = file.readline().strip()
-                file.close()

             (tval, last) = time_val(dirpath)
             (enable, stock_id) = self.icon_values(dirpath, self.active,
@@ -293,9 +292,8 @@ class CLIDbManager(object):
         name_list = [ name[0] for name in self.current_names ]
         title = find_next_db_name(name_list)

-        name_file = open(path_name, "w", encoding='utf8')
+        with open(path_name, "w", encoding='utf8') as name_file:
         name_file.write(title)
-        name_file.close()

         if create_db:
             # write the version number into metadata
@@ -409,9 +407,8 @@ class CLIDbManager(object):
             dirpath = os.path.join(dbdir, dpath)
             path_name = os.path.join(dirpath, NAME_FILE)
             if os.path.isfile(path_name):
-                file = open(path_name, 'r', encoding='utf8')
+                with open(path_name, 'r', encoding='utf8') as file:
                 name = file.readline().strip()
-                file.close()
                 if re.match("^" + dbname + "$", name):
                     match_list.append((name, dirpath))
         if len(match_list) == 0:
@@ -438,12 +435,10 @@ class CLIDbManager(object):
         Returns old_name, new_name if success, None, None if no success
         """
         try:
-            name_file = open(filepath, "r", encoding='utf8')
+            with open(filepath, "r", encoding='utf8') as name_file:
             old_text=name_file.read()
-            name_file.close()
-            name_file = open(filepath, "w", encoding='utf8')
+            with open(filepath, "w", encoding='utf8') as name_file:
             name_file.write(new_text)
-            name_file.close()
         except (OSError, IOError) as msg:
             CLIDbManager.ERROR(_("Could not rename Family Tree"),
                                str(msg))
@@ -543,11 +538,10 @@ def find_locker_name(dirpath):
     """
     try:
         fname = os.path.join(dirpath, "lock")
-        ifile = open(fname, 'r', encoding='utf8')
+        with open(fname, 'r', encoding='utf8') as ifile:
         username = ifile.read().strip()
         # feature request 2356: avoid genitive form
         last = _("Locked by %s") % username
-        ifile.close()
     except (OSError, IOError, UnicodeDecodeError):
         last = _("Unknown")
     return last
@@ -203,25 +203,22 @@ class DbState(Callback):
             dirpath = os.path.join(dbdir, dpath)
             path_name = os.path.join(dirpath, "name.txt")
             if os.path.isfile(path_name):
-                file = open(path_name, 'r', encoding='utf8')
+                with open(path_name, 'r', encoding='utf8') as file:
                 name = file.readline().strip()
-                file.close()
                 if dbname == name:
                     locked = False
                     locked_by = None
                     backend = None
                     fname = os.path.join(dirpath, "database.txt")
                     if os.path.isfile(fname):
-                        ifile = open(fname, 'r', encoding='utf8')
+                        with open(fname, 'r', encoding='utf8') as ifile:
                         backend = ifile.read().strip()
-                        ifile.close()
                     else:
                         backend = "bsddb"
                     try:
                         fname = os.path.join(dirpath, "lock")
-                        ifile = open(fname, 'r', encoding='utf8')
+                        with open(fname, 'r', encoding='utf8') as ifile:
                         locked_by = ifile.read().strip()
-                        locked = True
                         ifile.close()
                     except (OSError, IOError):
                         pass
@@ -103,9 +103,8 @@ class FilterList(object):
             if os.path.isfile(self.file):
                 parser = make_parser()
                 parser.setContentHandler(FilterParser(self))
-                the_file = open(self.file, 'r', encoding='utf8')
+                with open(self.file, 'r', encoding='utf8') as the_file:
                 parser.parse(the_file)
-                the_file.close()
         except (IOError, OSError):
             print("IO/OSError in _filterlist.py")
         except SAXParseException:
@@ -603,9 +603,8 @@ class GVDotDoc(GVDocBase):
         if self._filename[-3:] != ".gv":
             self._filename += ".gv"

-        dotfile = open(self._filename, "wb")
+        with open(self._filename, "wb") as dotfile:
         dotfile.write(self._dot.getvalue())
-        dotfile.close()

 #-------------------------------------------------------------------------------
 #
@@ -146,7 +146,7 @@ class StyleSheetList(object):
         """
         Saves the current StyleSheet definitions to the associated file.
         """
-        xml_file = open(self.__file, "w")
+        with open(self.__file, "w") as xml_file:
         xml_file.write('<?xml version="1.0" encoding="utf-8"?>\n')
         xml_file.write('<stylelist>\n')

@@ -170,7 +170,6 @@ class StyleSheetList(object):

         xml_file.write('</sheet>\n')
         xml_file.write('</stylelist>\n')
-        xml_file.close()

     def write_paragraph_style(self, xml_file, sheet, p_name):

@@ -275,9 +274,8 @@ class StyleSheetList(object):
             if os.path.isfile(self.__file):
                 parser = make_parser()
                 parser.setContentHandler(SheetParser(self))
-                the_file = open(self.__file)
+                with open(self.__file) as the_file:
                 parser.parse(the_file)
-                the_file.close()
         except (IOError, OSError, SAXParseException):
             pass

@@ -458,7 +458,7 @@ class BookList(object):
         """
         Saves the current BookList to the associated file.
         """
-        f = open(self.file, "w")
+        with open(self.file, "w") as f:
         f.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n")
         f.write('<booklist>\n')
         for name in sorted(self.bookmap): # enable a diff of archived copies
@@ -519,7 +519,6 @@ class BookList(object):
             f.write(' </book>\n')

         f.write('</booklist>\n')
-        f.close()

     def parse(self):
         """
@@ -504,9 +504,8 @@ class OptionListCollection(_options.OptionListCollection):
             if os.path.isfile(self.filename):
                 p = make_parser()
                 p.setContentHandler(OptionParser(self))
-                the_file = open(self.filename, encoding="utf-8")
+                with open(self.filename, encoding="utf-8") as the_file:
                 p.parse(the_file)
-                the_file.close()
         except (IOError, OSError, SAXParseException):
             pass

@@ -1000,9 +999,8 @@ class DocOptionListCollection(_options.OptionListCollection):
             if os.path.isfile(self.filename):
                 p = make_parser()
                 p.setContentHandler(DocOptionParser(self))
-                the_file = open(self.filename, encoding="utf-8")
+                with open(self.filename, encoding="utf-8") as the_file:
                 p.parse(the_file)
-                the_file.close()
         except (IOError, OSError, SAXParseException):
             pass

@@ -279,9 +279,8 @@ def resize_to_jpeg_buffer(source, size, crop=None):
     scaled = img.scale_simple(int(size[0]), int(size[1]), GdkPixbuf.InterpType.BILINEAR)
     os.close(filed)
     scaled.savev(dest, "jpeg", "", "")
-    ofile = open(dest, mode='rb')
+    with open(dest, mode='rb') as ofile:
     data = ofile.read()
-    ofile.close()
     try:
         os.unlink(dest)
     except:
@@ -436,8 +436,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):

     def __log_error(self):
         mypath = os.path.join(self.get_save_path(),DBRECOVFN)
-        ofile = open(mypath, "w")
-        ofile.close()
+        with open(mypath, "w") as ofile:
         try:
             clear_lock_file(self.get_save_path())
         except:
@@ -285,10 +285,8 @@ class HtmlDoc(BaseDoc, TextDoc):
         Copy support files to the datadir that needs to hold them
         """
         #css of textdoc styles
-        tdfile = open(os.path.join(self._backend.datadirfull(),
-                                   _TEXTDOCSCREEN), 'w')
+        with open(os.path.join(self._backend.datadirfull(), _TEXTDOCSCREEN), 'w') as tdfile:
         tdfile.write(self.style_declaration)
-        tdfile.close()
         #css file
         if self.css_filename:
             #we do an extra check in case file does not exist, eg cli call
@@ -30,12 +30,11 @@ from test import test_util
 test_util.path_append_parent()

 def get_potfile(filename):
-    fp = open(filename, "r")
+    with open(filename, "r") as fp:
     retvals = []
     for line in fp:
         if line and line[0] != "#":
             retvals.append(line.strip())
-    fp.close()
     return retvals

 # POTFILES.skip
@@ -55,9 +54,8 @@ class TestPOT(unittest.TestCase):
             realpath = (dir + "/" + file)
             pathfile = realpath[3:]
             if os.path.exists(realpath):
-                fp = open(realpath, "r")
+                with open(realpath, "r") as fp:
                 lines = fp.read()
-                fp.close()
                 found = False
                 for search in searches:
                     if search in lines:
@@ -88,9 +86,8 @@ class TestMake(unittest.TestCase):
         if pathfile[3:] in excluded_files:
             self.assertTrue(True, "exclude '%s'" % pathfile)
         elif os.path.exists(makefile):
-            fp = open(makefile, "r")
+            with open(makefile, "r") as fp:
             lines = fp.read()
-            fp.close()
             self.assertTrue(filename in lines, "'%s' not in %s/Makefile.in" %
                             (filename, path))
         else:
@@ -107,9 +104,8 @@ class TestGetText(unittest.TestCase):
     def helper(self, pofile, searches):
         if not os.path.exists("../../" + pofile):
             self.assertTrue(False, "'%s' is in POTFILES.in and does not exist" % pofile)
-        fp = open("../../" + pofile, "r")
+        with open("../../" + pofile, "r") as fp:
         lines = fp.read()
-        fp.close()
         found = False
         for search in searches:
             found = (search in lines) or found
@@ -166,7 +166,7 @@ def TipsParse(filename, mark):
         "Editor."
     '''

-    tips = open('../data/tips.xml.in.h', 'w')
+    with open('../data/tips.xml.in.h', 'w') as tips:
     marklist = root.iter(mark)
     for key in marklist:
         tip = ElementTree.tostring(key, encoding="UTF-8", method="xml")
@@ -182,7 +182,6 @@ def TipsParse(filename, mark):
         tip = tip.replace("</_tip>\n\n", "")
         tip = tip.replace('"', '&quot;')
         tips.write('char *s = N_("%s");\n' % tip)
-    tips.close()
     print ('Wrote ../data/tips.xml.in.h')
     root.clear()

@@ -215,7 +214,7 @@ def HolidaysParse(filename, mark):
     msgid "Yom Kippur"
     '''

-    holidays = open('../data/holidays.xml.in.h', 'w')
+    with open('../data/holidays.xml.in.h', 'w') as holidays:
     for key in ellist:
         if key.attrib.get(mark):
             line = key.attrib
@@ -223,7 +222,6 @@ def HolidaysParse(filename, mark):
             # mapping via the line dict (_name is the key)
             name = 'char *s = N_("%(_name)s");\n' % line
             holidays.write(name)
-    holidays.close()
     print ('Wrote ../data/holidays.xml.in.h')
     root.clear()

@@ -262,7 +260,7 @@ def XmlParse(filename, mark):
     </p>
     '''

-    head = open(filename + '.h', 'w')
+    with open(filename + '.h', 'w') as head:

     for key in root.iter():
         if key.tag == '{http://www.freedesktop.org/standards/shared-mime-info}%s' % mark:
@@ -275,7 +273,6 @@ def XmlParse(filename, mark):
             comment = 'char *s = N_("%s");\n' % key.text
             head.write(comment)

-    head.close()
     print ('Wrote %s' % filename)
     root.clear()

@@ -301,11 +298,10 @@ def DesktopParse(filename):
     perform genealogical research and analysis"
     '''

-    desktop = open('../data/gramps.desktop.in.h', 'w')
+    with open('../data/gramps.desktop.in.h', 'w') as desktop:

-    f = open(filename)
+    with open(filename) as f:
     lines = [file.strip() for file in f]
-    f.close()

     for line in lines:
         if line[0] == '_':
@@ -314,10 +310,8 @@ def DesktopParse(filename):
                 val = 'char *s = N_("%s");\n' % line[i+1:len(line)]
                 desktop.write(val)
-
-    desktop.close()
     print ('Wrote ../data/gramps.desktop.in.h')


 def KeyParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'gramps.keys'.
@@ -343,11 +337,10 @@ def KeyParse(filename, mark):
     msgid "GEDCOM"
     '''

-    key = open('../data/gramps.keys.in.h', 'w')
+    with open('../data/gramps.keys.in.h', 'w') as key:

-    f = open(filename)
+    with open(filename) as f:
     lines = [file for file in f]
-    f.close()

     temp = []

@@ -362,10 +355,8 @@ def KeyParse(filename, mark):
             val = 'char *s = N_("%s");\n' % t[i+1:len(t)]
             key.write(val)
-
-    key.close()
     print ('Wrote ../data/gramps.keys.in.h')


 def main():
     """
     The utility for handling translation stuff.
@@ -467,14 +458,13 @@ def create_filesfile():
     dir = os.getcwd()
     topdir = os.path.normpath(os.path.join(dir, '..', 'gramps'))
     lentopdir = len(topdir)
-    f = open('POTFILES.in')
+    with open('POTFILES.in') as f:
     infiles = dict(['../' + file.strip(), None] for file in f if file.strip()
                                                 and not file[0]=='#')
-    f.close()
-    f = open('POTFILES.skip')
+
+    with open('POTFILES.skip') as f:
     notinfiles = dict(['../' + file.strip(), None] for file in f if file
                                                 and not file[0]=='#')
-    f.close()

     for (dirpath, dirnames, filenames) in os.walk(topdir):
         root, subdir = os.path.split(dirpath)
@@ -499,11 +489,10 @@ def create_filesfile():
             if full_filename[lentopdir:] in notinfiles:
                 infiles['../gramps' + full_filename[lentopdir:]] = None
     #now we write out all the files in form ../gramps/filename
-    f = open('tmpfiles', 'w')
+    with open('tmpfiles', 'w') as f:
     for file in sorted(infiles.keys()):
         f.write(file)
         f.write('\n')
-    f.close()

 def listing(name, extensionlist):
     """
@@ -512,11 +501,10 @@ def listing(name, extensionlist):
     Like POTFILES.in and POTFILES.skip
     """

-    f = open('tmpfiles')
+    with open('tmpfiles') as f:
     files = [file.strip() for file in f if file and not file[0]=='#']
-    f.close()

-    temp = open(name, 'w')
+    with open(name, 'w') as temp:

     for entry in files:
         for ext in extensionlist:
@@ -525,8 +513,6 @@ def listing(name, extensionlist):
                 temp.write('\n')
                 break
-
-    temp.close()

 def headers():
     """
     Look at existing C file format headers.
@@ -570,8 +556,7 @@ def create_template():
     """
     Create a new file for template, if it does not exist.
     """
-    template = open('gramps.pot', 'w')
-    template.close()
+    with open('gramps.pot', 'w') as template:

 def extract_glade():
     """
@@ -624,7 +609,7 @@ def extract_gtkbuilder():
     '''

     files = ['../gramps/plugins/importer/importgedcom.glade', '../gramps/gui/glade/rule.glade']
-    temp = open('gtklist.h', 'w')
+    with open('gtklist.h', 'w') as temp:

     for filename in files:
         tree = ElementTree.parse(filename)
@@ -636,7 +621,6 @@ def extract_gtkbuilder():
             temp.write(col)
         root.clear()

-    temp.close()
     print ('Wrote gtklist.h')

 def retrieve():