Compare commits
1 commit
Author | SHA1 | Date
---|---|---
 | 38fc8210ab | 

.gitignore (vendored)

@@ -1,3 +1,4 @@
 *.exe
 *.o
 *.txt
+*.log

@@ -5,8 +5,6 @@ A common gateway inferface (CGI) program written in C to display Race CTS leader
 ## Requirements
 sqlite-devel python3 python-sqlite
 
-The first is only needed for compilation of the C program. The latter two are only for the auxiliary script `allmaps.py`.
-
 
 ## Compiling
 `make` makes a static page generator.

@@ -3,10 +3,11 @@ import sqlite3 as sql
 import logging
 import logging.handlers
 
-from os import listdir, mkdir
 from os.path import isfile, exists
 from urllib.parse import unquote
 
+import sys, traceback
+
 #------------------------------------------------+
 # get_list_from_server_txt
 #------------------------------------------------+

@@ -46,8 +47,10 @@ def get_list_from_server_txt(filename):
         output.append(row)
     return output
 
-def init_logging():
-    filename = "_logs/dbimport-%s.log"
+def init_logging(folder, base_file_name="dbimport-%s.log"):
+    if not exists(folder):
+        return False
+    filename = "%s/%s" % (folder, base_file_name)
     i = 0
     while exists(filename % i):
         i += 1
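
The revised `init_logging` now takes the log folder as an argument (keeping the old `dbimport-%s.log` pattern as a default) and bails out when the folder is missing, instead of assuming a hard-coded `_logs/` directory. A minimal standalone sketch of the file-naming part of that logic; `pick_log_file` is a stand-in name for illustration, not a function from the patch, and the handler setup further down the file is not shown in this hunk:

```python
from os.path import exists

def pick_log_file(folder, base_file_name="dbimport-%s.log"):
    # Mirrors the patched init_logging(): refuse to run if the folder is absent,
    # then pick the first unused index for dbimport-<i>.log.
    if not exists(folder):
        return False
    filename = "%s/%s" % (folder, base_file_name)
    i = 0
    while exists(filename % i):
        i += 1
    return filename % i

# pick_log_file("logs") -> "logs/dbimport-0.log" on a first run,
# "logs/dbimport-1.log" once that file exists, and False if "logs" is missing.
```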

@@ -57,7 +60,7 @@ def init_logging():
     return filename
 
 #------------------------------------------------+
-# Functions: Clean up.
+# uid2namefix
 #------------------------------------------------+
 # Unlike other rows,
 # the separator character, '/' is part of the value of the second column.

@@ -66,6 +69,7 @@ def init_logging():
 # \/uid2name/Mnumg2Yh/yxNFDTqGI+YyhlM7QDI0fpEmAaBJ8cI5dU=\Tuxxy
 # it should become:
 # ["uid2name", "Mnumg2Yh/yxNFDTqGI+YyhlM7QDI0fpEmAaBJ8cI5dU=", "Tuxxy"]
+
 def uid2namefix(row):
     # quick fix
     # replace first and last occurrence of backslash
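
The comment block above describes the parsing quirk rather than the fix itself. A self-contained sketch of the transformation it describes, based only on the example input and output given in those comments; `split_uid2name` is an illustrative name, not the repository's code:

```python
def split_uid2name(line):
    # "\/uid2name/<public-key>\<nick>"  ->  ["uid2name", "<public-key>", "<nick>"]
    # The key may itself contain '/', so split on the backslashes instead.
    body, _, nick = line.lstrip("\\/").rpartition("\\")
    key, _, value = body.partition("/")
    return [key, value, nick]

print(split_uid2name("\\/uid2name/Mnumg2Yh/yxNFDTqGI+YyhlM7QDI0fpEmAaBJ8cI5dU=\\Tuxxy"))
# -> ['uid2name', 'Mnumg2Yh/yxNFDTqGI+YyhlM7QDI0fpEmAaBJ8cI5dU=', 'Tuxxy']
```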

@@ -118,74 +122,74 @@ def filters(db):
         sid.append([ e[0], unquote(e[-1]) ])
     return tt, tr, ti, s, sid
 
-#------------------------------------------------+
-# Functions: Database Creation
-#------------------------------------------------+
-
-def inserttodb(c, q, d):
-    for x in d:
-        # possible to do executemany
-        # but want to be able to catch the problematic rows
-        # as it is iterated through.
-        # and proceed with adding OK rows.
-        try:
-            c.execute(q, x)
-        except sql.ProgrammingError as e:
-            print(e)
-            print(x)
-
-#------------------------------------------------+
-
-# insert new data directly into new database file
-def i(d, s):
+def insert_to_database(d, s):
+    def insert(c, q, d):
+        for x in d:
+            # possible to do executemany
+            # but want to be able to catch the problematic rows
+            # as it is iterated through.
+            # and proceed with adding OK rows.
+            try:
+                c.execute(q, x)
+            except sql.ProgrammingError as e:
+                print(e)
+                print(x)
+                return
+
     con = sql.connect(d)
     with con:
         csr = con.cursor()
         try:
-            times, ranks, ids, speed, speed_ids = filters(get_list_from_server_txt(s))
+            times, \
+            ranks, \
+            ids, \
+            speed, \
+            speed_ids = filters(get_list_from_server_txt(s))
             if times:
-                inserttodb(csr, "INSERT OR REPLACE INTO Cts_times VALUES(?, ?, ?, ?)", times)
+                insert(csr, "INSERT OR REPLACE INTO Cts_times VALUES(?, ?, ?, ?)", times)
                 logging.info('\n'.join(y for y in [str(x) for x in times]))
             if ranks:
-                inserttodb(csr, "INSERT OR REPLACE INTO Cts_ranks VALUES(?, ?, ?, ?)", ranks)
+                insert(csr, "INSERT OR REPLACE INTO Cts_ranks VALUES(?, ?, ?, ?)", ranks)
                 logging.info('\n'.join(y for y in [str(x) for x in ranks]))
             if ids:
-                inserttodb(csr, "INSERT OR REPLACE INTO Id2alias VALUES(?, ?, ?)", ids)
+                insert(csr, "INSERT OR REPLACE INTO Id2alias VALUES(?, ?, ?)", ids)
                 logging.info('\n'.join(y for y in [str(x) for x in ids]))
             if speed:
-                inserttodb(csr, "INSERT OR REPLACE INTO Speed VALUES(?, ?)", speed)
+                insert(csr, "INSERT OR REPLACE INTO Speed VALUES(?, ?)", speed)
             if speed_ids:
-                inserttodb(csr, "INSERT OR REPLACE INTO Fastest_players VALUES(?, ?)", speed_ids)
+                insert(csr, "INSERT OR REPLACE INTO Fastest_players VALUES(?, ?)", speed_ids)
         except sql.Error:
             logging.exception("sql error encountered in function 'i'")
             if con:
                 con.rollback()
 
-# 'insert' new data into a file i.e sql query file
-def f(d, s):
-    with open(d, 'w', encoding='utf-8') as h:
-        times, ranks, ids, speed, speed_ids = filters(get_list_from_server_txt(s))
+def write_query(out_file, data):
+    if exists(out_file):
+        print("stopped: output file already exists", file=sys.stderr)
+        return False
+    times, \
+    ranks, \
+    ids, \
+    speed, \
+    speed_ids = filters(get_list_from_server_txt(data))
+    with open(out_file, 'w', encoding='utf-8') as file_handle:
         for t in times:
-            h.write("INSERT OR REPLACE INTO Cts_times VALUES(%s, %s, %s, %s)\n" % tuple(t))
-            pass
+            file_handle.write("INSERT OR REPLACE INTO Cts_times VALUES(\'%s\', \'%s\', %s, %s);\n" % tuple(t))
         for r in ranks:
-            h.write("INSERT OR REPLACE INTO Cts_ranks VALUES(%s, %s, %s, %s)\n" % tuple(r))
-            pass
+            file_handle.write("INSERT OR REPLACE INTO Cts_ranks VALUES(\'%s\', \'%s\', %s, \'%s\');\n" % tuple(r))
         for i in ids:
-            h.write("INSERT OR REPLACE INTO Id2aslias VALUES(%s, %s, %s)\n" % tuple(i))
-            pass
-        pass
-    pass
+            file_handle.write("INSERT OR REPLACE INTO Id2alias VALUES(\'%s\', \'%s\', \'%s\');\n" % tuple(i))
+    return True
 
 # Test whether repeat rows are added.
-def duplicatestest(d, s):
-    c = sql.connect(d)
+def check_duplicates(database, data):
+    c = sql.connect(database)
     p = True
     with c:
         cs = c.cursor()
         try:
             logging.info("Inserting into database (1/2)")
-            i(d, s)
+            insert_to_database(database, data)
             logging.info("Querying (1/2)")
             cs.execute("SELECT * FROM Cts_times")
             a = cs.fetchall()
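
The nested `insert` helper keeps the row-by-row behaviour that its comments explain: each row goes through `cursor.execute` individually so a malformed row can be reported while the rest still go in. A small self-contained illustration of that trade-off versus `executemany` (the table columns and rows here are invented for the example, not taken from the repository's schema):

```python
import sqlite3

con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE Cts_times (map TEXT, player TEXT, time REAL, position INTEGER)")

rows = [
    ("somemap", "player1", 12.5, 1),
    ("somemap", "player2"),              # wrong arity: raises sqlite3.ProgrammingError
    ("somemap", "player3", 13.0, 2),
]

# executemany() would stop at the bad row without saying which row it was;
# looping lets the script print the offending row and keep the good ones.
for row in rows:
    try:
        cur.execute("INSERT OR REPLACE INTO Cts_times VALUES(?, ?, ?, ?)", row)
    except sqlite3.ProgrammingError as err:
        print(err, row)
con.commit()
print(cur.execute("SELECT COUNT(*) FROM Cts_times").fetchone())   # (2,)
```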

@@ -194,7 +198,7 @@ def duplicatestest(d, s):
             cs.execute("SELECT * FROM Id2alias")
             c = cs.fetchall()
             logging.info("Inserting into database (2/2)")
-            i(d, s)
+            insert_to_database(database, data)
             logging.info("Querying (2/2)")
             cs.execute("SELECT * FROM Cts_times")
             x = cs.fetchall()
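
A plausible reason for a duplicates test like this: `INSERT OR REPLACE` only collapses repeated imports when the table has a `UNIQUE` or `PRIMARY KEY` constraint to conflict on; without one it behaves like a plain insert and the row count doubles. A standalone demonstration (the schemas are invented for illustration and are not the repository's):

```python
import sqlite3

con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE unconstrained (map TEXT, best_time REAL)")
cur.execute("CREATE TABLE constrained (map TEXT PRIMARY KEY, best_time REAL)")

for _ in range(2):  # simulate importing the same server data twice
    cur.execute("INSERT OR REPLACE INTO unconstrained VALUES(?, ?)", ("somemap", 12.5))
    cur.execute("INSERT OR REPLACE INTO constrained VALUES(?, ?)", ("somemap", 12.5))

print(cur.execute("SELECT COUNT(*) FROM unconstrained").fetchone())  # (2,) -- duplicate slipped in
print(cur.execute("SELECT COUNT(*) FROM constrained").fetchone())    # (1,) -- second insert replaced the first
```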

@@ -218,17 +222,34 @@ def duplicatestest(d, s):
 
 if __name__ == "__main__":
     ap = argparse.ArgumentParser()
-    ap.add_argument('db')
-    ap.add_argument('src')
-    ap.add_argument('-t', '--test', action='store_true')
-    ap.add_argument('-q', '--sql', action='store_true')
+    ap.add_argument('dest',
+        help="destination, a sqlite3 database (or query file, if given -q flag)")
+    ap.add_argument('src',
+        help="source, should be data generated by a Xonotic server")
+    ap.add_argument('-q', '--export-query',
+        action='store_true',
+        help="write query file (as opposed to executing / inserting rows into database)")
+    ap.add_argument('-t', '--test',
+        action='store_true',
+        help="test database for duplicates")
+    ap.add_argument('-l', '--log-dir',
+        type=str,
+        help="set folder to store log files")
     args = ap.parse_args()
-    log_file = init_logging()
-    print("Writing log to ", log_file)
-    if args.test:
-        duplicatestest(args.db, args.src)
-    if args.sql:
-        f(args.db, args.src)
-    else:
-        i(args.db, args.src)
+    log_dir = args.log_dir or "logs"
+    log_file = init_logging(log_dir)
+    if log_file:
+        print("writing log to folder '%s'," % log_dir, log_file, file=sys.stderr)
+    else:
+        print("exited: logging not initialized (folder '%s' does not exist)" % log_dir, file=sys.stderr)
+        exit()
+    try:
+        if args.test:
+            check_duplicates(args.dest, args.src)
+        if args.export_query:
+            write_query(args.dest, args.src)
+        else:
+            insert_to_database(args.dest, args.src)
+    except FileNotFoundError:
+        traceback.print_exc()
+        print("\n\t exited: no input file to work with.", file=sys.stderr)
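
Putting the new argument parser together, the script now takes a destination and a source plus optional flags. The sketch below rebuilds the parser exactly as shown in this hunk to demonstrate what a parsed invocation looks like; file names such as `leaderboard.db` and `server.txt` are placeholders, and the script's own filename is not shown in this compare view:

```python
import argparse

ap = argparse.ArgumentParser()
ap.add_argument('dest',
    help="destination, a sqlite3 database (or query file, if given -q flag)")
ap.add_argument('src',
    help="source, should be data generated by a Xonotic server")
ap.add_argument('-q', '--export-query', action='store_true',
    help="write query file (as opposed to executing / inserting rows into database)")
ap.add_argument('-t', '--test', action='store_true',
    help="test database for duplicates")
ap.add_argument('-l', '--log-dir', type=str,
    help="set folder to store log files")

# e.g. "<script> leaderboard.db server.txt --log-dir logs"
args = ap.parse_args(["leaderboard.db", "server.txt", "--log-dir", "logs"])
print(args.dest, args.src, args.export_query, args.test, args.log_dir)
# -> leaderboard.db server.txt False False logs
# When --log-dir is omitted, the patched __main__ block falls back to "logs".
```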