Merge pull request #113 from sam-m888/useWithOpen
Prefer with to open files
commit b526a41af1
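The change applied throughout this diff is mechanical: every manual open()/close() pair becomes a with block, so the file handle is released even when an exception is raised before close() would have run. A minimal sketch of the before/after shape (the read_name wrapper and its argument are illustrative only, not part of the patch):

    # before: the handle stays open if readline() raises before close()
    def read_name(path_name):
        file = open(path_name, 'r', encoding='utf8')
        name = file.readline().strip()
        file.close()
        return name

    # after: the context manager closes the file on normal exit and on error
    def read_name(path_name):
        with open(path_name, 'r', encoding='utf8') as file:
            name = file.readline().strip()
        return name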
@@ -234,9 +234,8 @@ class CLIDbManager(object):
        except:
            version = (0, 0, 0)
        if os.path.isfile(path_name):
-            file = open(path_name, 'r', encoding='utf8')
+            with open(path_name, 'r', encoding='utf8') as file:
                name = file.readline().strip()
-            file.close()

        (tval, last) = time_val(dirpath)
        (enable, stock_id) = self.icon_values(dirpath, self.active,
@@ -293,9 +292,8 @@ class CLIDbManager(object):
        name_list = [ name[0] for name in self.current_names ]
        title = find_next_db_name(name_list)

-        name_file = open(path_name, "w", encoding='utf8')
+        with open(path_name, "w", encoding='utf8') as name_file:
            name_file.write(title)
-        name_file.close()

        if create_db:
            # write the version number into metadata
@@ -409,9 +407,8 @@ class CLIDbManager(object):
        dirpath = os.path.join(dbdir, dpath)
        path_name = os.path.join(dirpath, NAME_FILE)
        if os.path.isfile(path_name):
-            file = open(path_name, 'r', encoding='utf8')
+            with open(path_name, 'r', encoding='utf8') as file:
                name = file.readline().strip()
-            file.close()
            if re.match("^" + dbname + "$", name):
                match_list.append((name, dirpath))
    if len(match_list) == 0:
@@ -438,12 +435,10 @@ class CLIDbManager(object):
        Returns old_name, new_name if success, None, None if no success
        """
        try:
-            name_file = open(filepath, "r", encoding='utf8')
+            with open(filepath, "r", encoding='utf8') as name_file:
                old_text=name_file.read()
-            name_file.close()
-            name_file = open(filepath, "w", encoding='utf8')
+            with open(filepath, "w", encoding='utf8') as name_file:
                name_file.write(new_text)
-            name_file.close()
        except (OSError, IOError) as msg:
            CLIDbManager.ERROR(_("Could not rename Family Tree"),
                               str(msg))
@@ -543,11 +538,10 @@ def find_locker_name(dirpath):
    """
    try:
        fname = os.path.join(dirpath, "lock")
-        ifile = open(fname, 'r', encoding='utf8')
+        with open(fname, 'r', encoding='utf8') as ifile:
            username = ifile.read().strip()
            # feature request 2356: avoid genitive form
            last = _("Locked by %s") % username
-        ifile.close()
    except (OSError, IOError, UnicodeDecodeError):
        last = _("Unknown")
    return last
@@ -203,26 +203,23 @@ class DbState(Callback):
            dirpath = os.path.join(dbdir, dpath)
            path_name = os.path.join(dirpath, "name.txt")
            if os.path.isfile(path_name):
-                file = open(path_name, 'r', encoding='utf8')
+                with open(path_name, 'r', encoding='utf8') as file:
                    name = file.readline().strip()
-                file.close()
                if dbname == name:
                    locked = False
                    locked_by = None
                    backend = None
                    fname = os.path.join(dirpath, "database.txt")
                    if os.path.isfile(fname):
-                        ifile = open(fname, 'r', encoding='utf8')
+                        with open(fname, 'r', encoding='utf8') as ifile:
                            backend = ifile.read().strip()
-                        ifile.close()
                    else:
                        backend = "bsddb"
                    try:
                        fname = os.path.join(dirpath, "lock")
-                        ifile = open(fname, 'r', encoding='utf8')
+                        with open(fname, 'r', encoding='utf8') as ifile:
                            locked_by = ifile.read().strip()
                            locked = True
-                        ifile.close()
                    except (OSError, IOError):
                        pass
                    return (dirpath, locked, locked_by, backend)
@@ -103,9 +103,8 @@ class FilterList(object):
            if os.path.isfile(self.file):
                parser = make_parser()
                parser.setContentHandler(FilterParser(self))
-                the_file = open(self.file, 'r', encoding='utf8')
+                with open(self.file, 'r', encoding='utf8') as the_file:
                    parser.parse(the_file)
-                the_file.close()
        except (IOError, OSError):
            print("IO/OSError in _filterlist.py")
        except SAXParseException:
@@ -603,9 +603,8 @@ class GVDotDoc(GVDocBase):
        if self._filename[-3:] != ".gv":
            self._filename += ".gv"

-        dotfile = open(self._filename, "wb")
+        with open(self._filename, "wb") as dotfile:
            dotfile.write(self._dot.getvalue())
-        dotfile.close()

#-------------------------------------------------------------------------------
#
@@ -146,7 +146,7 @@ class StyleSheetList(object):
        """
        Saves the current StyleSheet definitions to the associated file.
        """
-        xml_file = open(self.__file, "w")
+        with open(self.__file, "w") as xml_file:
            xml_file.write('<?xml version="1.0" encoding="utf-8"?>\n')
            xml_file.write('<stylelist>\n')

@@ -170,7 +170,6 @@ class StyleSheetList(object):

            xml_file.write('</sheet>\n')
        xml_file.write('</stylelist>\n')
-        xml_file.close()

    def write_paragraph_style(self, xml_file, sheet, p_name):

@@ -275,9 +274,8 @@ class StyleSheetList(object):
            if os.path.isfile(self.__file):
                parser = make_parser()
                parser.setContentHandler(SheetParser(self))
-                the_file = open(self.__file)
+                with open(self.__file) as the_file:
                    parser.parse(the_file)
-                the_file.close()
        except (IOError, OSError, SAXParseException):
            pass

@@ -458,7 +458,7 @@ class BookList(object):
        """
        Saves the current BookList to the associated file.
        """
-        f = open(self.file, "w")
+        with open(self.file, "w") as f:
            f.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n")
            f.write('<booklist>\n')
            for name in sorted(self.bookmap): # enable a diff of archived copies
@@ -519,7 +519,6 @@ class BookList(object):
                f.write(' </book>\n')

            f.write('</booklist>\n')
-        f.close()

    def parse(self):
        """
@@ -504,9 +504,8 @@ class OptionListCollection(_options.OptionListCollection):
            if os.path.isfile(self.filename):
                p = make_parser()
                p.setContentHandler(OptionParser(self))
-                the_file = open(self.filename, encoding="utf-8")
+                with open(self.filename, encoding="utf-8") as the_file:
                    p.parse(the_file)
-                the_file.close()
        except (IOError, OSError, SAXParseException):
            pass

@@ -1000,9 +999,8 @@ class DocOptionListCollection(_options.OptionListCollection):
            if os.path.isfile(self.filename):
                p = make_parser()
                p.setContentHandler(DocOptionParser(self))
-                the_file = open(self.filename, encoding="utf-8")
+                with open(self.filename, encoding="utf-8") as the_file:
                    p.parse(the_file)
-                the_file.close()
        except (IOError, OSError, SAXParseException):
            pass

@@ -279,9 +279,8 @@ def resize_to_jpeg_buffer(source, size, crop=None):
    scaled = img.scale_simple(int(size[0]), int(size[1]), GdkPixbuf.InterpType.BILINEAR)
    os.close(filed)
    scaled.savev(dest, "jpeg", "", "")
-    ofile = open(dest, mode='rb')
+    with open(dest, mode='rb') as ofile:
        data = ofile.read()
-    ofile.close()
    try:
        os.unlink(dest)
    except:
@@ -285,10 +285,8 @@ class HtmlDoc(BaseDoc, TextDoc):
        Copy support files to the datadir that needs to hold them
        """
        #css of textdoc styles
-        tdfile = open(os.path.join(self._backend.datadirfull(),
-                                   _TEXTDOCSCREEN), 'w')
+        with open(os.path.join(self._backend.datadirfull(), _TEXTDOCSCREEN), 'w') as tdfile:
            tdfile.write(self.style_declaration)
-        tdfile.close()
        #css file
        if self.css_filename:
            #we do an extra check in case file does not exist, eg cli call
@@ -30,12 +30,11 @@ from test import test_util
test_util.path_append_parent()

def get_potfile(filename):
-    fp = open(filename, "r")
+    with open(filename, "r") as fp:
        retvals = []
        for line in fp:
            if line and line[0] != "#":
                retvals.append(line.strip())
-    fp.close()
    return retvals

# POTFILES.skip
@@ -55,9 +54,8 @@ class TestPOT(unittest.TestCase):
                realpath = (dir + "/" + file)
                pathfile = realpath[3:]
                if os.path.exists(realpath):
-                    fp = open(realpath, "r")
+                    with open(realpath, "r") as fp:
                        lines = fp.read()
-                    fp.close()
                    found = False
                    for search in searches:
                        if search in lines:
@@ -88,9 +86,8 @@ class TestMake(unittest.TestCase):
        if pathfile[3:] in excluded_files:
            self.assertTrue(True, "exclude '%s'" % pathfile)
        elif os.path.exists(makefile):
-            fp = open(makefile, "r")
+            with open(makefile, "r") as fp:
                lines = fp.read()
-            fp.close()
            self.assertTrue(filename in lines, "'%s' not in %s/Makefile.in" %
                            (filename, path))
        else:
@@ -107,9 +104,8 @@ class TestGetText(unittest.TestCase):
    def helper(self, pofile, searches):
        if not os.path.exists("../../" + pofile):
            self.assertTrue(False, "'%s' is in POTFILES.in and does not exist" % pofile)
-        fp = open("../../" + pofile, "r")
+        with open("../../" + pofile, "r") as fp:
            lines = fp.read()
-        fp.close()
        found = False
        for search in searches:
            found = (search in lines) or found
@@ -113,7 +113,6 @@ def tests():
        print ('Please, install %(program)s for listing groups of messages'
               % {'program': msgattribCmd})

-
    try:
        print("\n===='xgettext' =(generate a new template)==============\n")
        os.system('''%(program)s -V''' % {'program': xgettextCmd})
@@ -127,12 +126,10 @@ def tests():
    except:
        print ('Please, install python')


def TipsParse(filename, mark):
    """
    Experimental alternative to 'intltool-extract' for 'tips.xml'.
    """

    from xml.etree import ElementTree

    tree = ElementTree.parse(filename)
@@ -166,7 +163,7 @@ def TipsParse(filename, mark):
        "Editor."
    '''

-    tips = open('../data/tips.xml.in.h', 'w')
+    with open('../data/tips.xml.in.h', 'w') as tips:
        marklist = root.iter(mark)
        for key in marklist:
            tip = ElementTree.tostring(key, encoding="UTF-8", method="xml")
@@ -182,7 +179,6 @@ def TipsParse(filename, mark):
            tip = tip.replace("</_tip>\n\n", "")
            tip = tip.replace('"', '"')
            tips.write('char *s = N_("%s");\n' % tip)
-    tips.close()
    print ('Wrote ../data/tips.xml.in.h')
    root.clear()

@@ -190,7 +186,6 @@ def HolidaysParse(filename, mark):
    """
    Experimental alternative to 'intltool-extract' for 'holidays.xml'.
    """

    from xml.etree import ElementTree

    tree = ElementTree.parse(filename)
@@ -214,8 +209,7 @@ def HolidaysParse(filename, mark):
    msgid "Jewish Holidays"
    msgid "Yom Kippur"
    '''

-    holidays = open('../data/holidays.xml.in.h', 'w')
+    with open('../data/holidays.xml.in.h', 'w') as holidays:
        for key in ellist:
            if key.attrib.get(mark):
                line = key.attrib
@@ -223,7 +217,6 @@
                # mapping via the line dict (_name is the key)
                name = 'char *s = N_("%(_name)s");\n' % line
                holidays.write(name)
-    holidays.close()
    print ('Wrote ../data/holidays.xml.in.h')
    root.clear()

@@ -232,7 +225,6 @@ def XmlParse(filename, mark):
    """
    Experimental alternative to 'intltool-extract' for 'file.xml.in'.
    """

    from xml.etree import ElementTree

    tree = ElementTree.parse(filename)
@@ -262,8 +254,7 @@ def XmlParse(filename, mark):
    </p>
    '''

-    head = open(filename + '.h', 'w')
-
+    with open(filename + '.h', 'w') as head:
        for key in root.iter():
            if key.tag == '{http://www.freedesktop.org/standards/shared-mime-info}%s' % mark:
                comment = 'char *s = N_("%s");\n' % key.text
@@ -275,11 +266,9 @@ def XmlParse(filename, mark):
                comment = 'char *s = N_("%s");\n' % key.text
                head.write(comment)
-
-    head.close()
    print ('Wrote %s' % filename)
    root.clear()


def DesktopParse(filename):
    """
    Experimental alternative to 'intltool-extract' for 'gramps.desktop'.
@@ -300,12 +289,10 @@ def DesktopParse(filename):
    "Manage genealogical information,
     perform genealogical research and analysis"
    '''
+    with open('../data/gramps.desktop.in.h', 'w') as desktop:

-    desktop = open('../data/gramps.desktop.in.h', 'w')
-
-    f = open(filename)
+        with open(filename) as f:
            lines = [file.strip() for file in f]
-    f.close()

        for line in lines:
            if line[0] == '_':
@@ -314,10 +301,8 @@ def DesktopParse(filename):
            val = 'char *s = N_("%s");\n' % line[i+1:len(line)]
            desktop.write(val)
-
-    desktop.close()
    print ('Wrote ../data/gramps.desktop.in.h')


def KeyParse(filename, mark):
    """
    Experimental alternative to 'intltool-extract' for 'gramps.keys'.
@@ -342,12 +327,10 @@ def KeyParse(filename, mark):
    msgid "Gramps XML database"
    msgid "GEDCOM"
    '''
+    with open('../data/gramps.keys.in.h', 'w') as key:

-    key = open('../data/gramps.keys.in.h', 'w')
-
-    f = open(filename)
+        with open(filename) as f:
            lines = [file for file in f]
-    f.close()

        temp = []

@@ -362,10 +345,8 @@ def KeyParse(filename, mark):
            val = 'char *s = N_("%s");\n' % t[i+1:len(t)]
            key.write(val)
-
-    key.close()
    print ('Wrote ../data/gramps.keys.in.h')


def main():
    """
    The utility for handling translation stuff.
@@ -418,7 +399,6 @@ def main():
                        choices=[file for file in os.listdir('.') if file.endswith('.po')],
                        help="list fuzzy messages")

-
    args = parser.parse_args()
    namespace, extra = parser.parse_known_args()

@@ -467,14 +447,13 @@ def create_filesfile():
    dir = os.getcwd()
    topdir = os.path.normpath(os.path.join(dir, '..', 'gramps'))
    lentopdir = len(topdir)
-    f = open('POTFILES.in')
+    with open('POTFILES.in') as f:
        infiles = dict(['../' + file.strip(), None] for file in f if file.strip()
                       and not file[0]=='#')
-    f.close()
-    f = open('POTFILES.skip')
+
+    with open('POTFILES.skip') as f:
        notinfiles = dict(['../' + file.strip(), None] for file in f if file
                          and not file[0]=='#')
-    f.close()

    for (dirpath, dirnames, filenames) in os.walk(topdir):
        root, subdir = os.path.split(dirpath)
@@ -499,11 +478,10 @@ def create_filesfile():
            if full_filename[lentopdir:] in notinfiles:
                infiles['../gramps' + full_filename[lentopdir:]] = None
    #now we write out all the files in form ../gramps/filename
-    f = open('tmpfiles', 'w')
+    with open('tmpfiles', 'w') as f:
        for file in sorted(infiles.keys()):
            f.write(file)
            f.write('\n')
-    f.close()

def listing(name, extensionlist):
    """
@@ -512,12 +490,10 @@ def listing(name, extensionlist):
    Like POTFILES.in and POTFILES.skip
    """

-    f = open('tmpfiles')
+    with open('tmpfiles') as f:
        files = [file.strip() for file in f if file and not file[0]=='#']
-    f.close()

-    temp = open(name, 'w')
-
+    with open(name, 'w') as temp:
        for entry in files:
            for ext in extensionlist:
                if entry.endswith(ext):
@@ -525,8 +501,6 @@
                temp.write('\n')
                break

-    temp.close()
-
def headers():
    """
    Look at existing C file format headers.
@@ -558,7 +532,6 @@ def extract_xml():
    Extract translation strings from XML based, keys, mime and desktop
    files. Own XML files parsing and custom translation marks.
    """
-
    HolidaysParse('../data/holidays.xml.in', '_name')
    TipsParse('../data/tips.xml.in', '_tip')
    XmlParse('../data/gramps.xml.in', '_comment')
@@ -570,8 +543,8 @@ def create_template():
    """
    Create a new file for template, if it does not exist.
    """
-    template = open('gramps.pot', 'w')
-    template.close()
+    with open('gramps.pot', 'w') as template:
+        pass

def extract_glade():
    """
@@ -624,8 +597,7 @@ def extract_gtkbuilder():
    '''

    files = ['../gramps/plugins/importer/importgedcom.glade', '../gramps/gui/glade/rule.glade']
-    temp = open('gtklist.h', 'w')
-
+    with open('gtklist.h', 'w') as temp:
        for filename in files:
            tree = ElementTree.parse(filename)
            root = tree.getroot()
@@ -636,7 +608,6 @@
            temp.write(col)
        root.clear()

-    temp.close()
    print ('Wrote gtklist.h')

def retrieve():
@@ -696,7 +667,6 @@ def merge(args):
    """
    Merge messages with 'gramps.pot'
    """
-
    for arg in args:
        if arg == 'all':
            continue
@@ -709,7 +679,6 @@ def check(args):
    """
    Check the translation file
    """
-
    for arg in args:
        if arg == 'all':
            continue
@@ -724,14 +693,12 @@ def untranslated(arg):
    """
    List untranslated messages
    """
-
    os.system('''%(msgattrib)s --untranslated %(lang.po)s''' % {'msgattrib': msgattribCmd, 'lang.po': arg[0]})

def fuzzy(arg):
    """
    List fuzzy messages
    """
-
    os.system('''%(msgattrib)s --only-fuzzy --no-obsolete %(lang.po)s''' % {'msgattrib': msgattribCmd, 'lang.po': arg[0]})

if __name__ == "__main__":