Do not lose data during database upgrades.

The database upgrade script is updated to also export
user-entered data stored in columns of tables that also
contain derived data, and to re-import everything after
the upgrade.
D. Berge
2020-09-27 19:36:28 +02:00
parent a05ecfd41c
commit ca41bd8132
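The new system_dump.py and system_load.py helpers wired in by this diff are not shown here. One plausible shape for the dump half, consistent with the commit message (export only the user-entered columns of tables that also hold derived data, keyed so they can be re-applied after the schema rebuild), is sketched below; the table, key and column names and the staging directory are assumptions for illustration, not taken from the repository.

#!/usr/bin/env python3
#
# Sketch only: the real system_dump.py is not part of this diff, and the
# table, key and column names below are assumptions used for illustration.
import os

dbname = os.environ["PGDATABASE"] if "PGDATABASE" in os.environ else "dougal"
dbuser = os.environ["PGUSER"] if "PGUSER" in os.environ else "postgres"
dump_dir = "/tmp/dougal-upgrade"

# User-entered columns that live in tables which also hold derived data,
# together with the key needed to re-attach them after the schema rebuild.
MIXED = {
    "sequence": ("sequence_id", ["ntba", "remarks"]),
    "shot":     ("shot_id",     ["ntba", "ntbp", "remarks"]),
    "preplot":  ("point_id",    ["remarks"]),
}

os.makedirs(dump_dir, exist_ok=True)
for table, (key, cols) in MIXED.items():
    collist = ", ".join([key] + cols)
    csv = os.path.join(dump_dir, f"{table}.csv")
    # \copy runs client side, so it needs no server filesystem access
    cmd = (f"psql -U {dbuser} -d {dbname} "
           f"-c \"\\copy (SELECT {collist} FROM {table}) TO '{csv}' CSV HEADER\"")
    print(cmd)
    os.system(cmd)

The diff below only calls these helpers from the upgrade flow; their actual contents live elsewhere in the repository.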

@@ -12,21 +12,10 @@ print(
 \u001b[31;4mYou are about to upgrade the database to the latest schema in /etc/db/schema-template.sql.\u001b[0m
 \u001b[33mThis will cause all existing surveys to be wiped out and re-created.
-Data which is captured by the database itself, e.g., event data will be exported¹
+Data which is captured by the database itself, e.g., event data will be exported
 and re-imported. The rest should be brought back in at the next bin/runner.sh
 run.
-¹ As of this version, not all data will be exported. The following information
-will be lost:
-- Sequence NTBA status
-- Shot NTBA status
-- Shot NTBP status
-- Raw sequence remarks
-- Raw shot remarks
-- Preplot point remarks
-\u001b[33;1m- All real-time data will also be lost\u001b[33m
 \u001b[0m
 If this is what you want to do, press ENTER, else press Ctrl+C to cancel.
@@ -34,22 +23,57 @@ If this is what you want to do, press ENTER, else press Ctrl+C to cancel.
 input()
+#
+# If we got here it's because the user has asked us to proceed
+#
+locals().update(configuration.vars())
+if os.path.exists(LOCKFILE):
+    print(f"Dougal's runner is currently active. Retry in a few moments or remove {LOCKFILE} if you think it's stale")
+    exit(1)
+# Let's stop runner.sh from messing with our upgrade
+with open(LOCKFILE, "w") as fd:
+    fd.write(str(os.getpid()))
+#
+# Define some variables that we will need later on
+#
 dbname=os.environ["PGDATABASE"] if "PGDATABASE" in os.environ else "dougal"
 dbuser=os.environ["PGUSER"] if "PGUSER" in os.environ else "postgres"
 dbtemplate=os.path.abspath(os.path.join(bin_path, "..", "etc", "db", "database-template.sql"))
 system_exports = os.path.join(bin_path, "system_exports.py")
+system_dump = os.path.join(bin_path, "system_dump.py")
+system_load = os.path.join(bin_path, "system_load.py")
 create_survey = os.path.join(bin_path, "create_survey.sh")
 runner = "RUNNER_NOEXPORT=true RUNNER_IMPORT=true " + os.path.join(bin_path, "runner.sh")
+#
+# Export data
+#
+# The usual exports
 print(system_exports)
 os.system(system_exports)
+# And the exports from “mixed” data
+print(system_dump)
+os.system(system_dump)
+#
+# Do database stuff: this is where we start breaking things
+#
+# Everyone gets kicked out
 print(f"Terminate all sessions in {dbname}")
 cmd = f"psql -U {dbuser} -d {dbname} -c 'SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = current_database() AND pid <> pg_backend_pid();'"
 print(cmd)
 res = os.system(cmd)
+# The database gets dropped
 cmd = f"psql -U {dbuser} -d template1 -c 'DROP DATABASE {dbname};'"
 print(cmd)
 res = os.system(cmd)
@@ -57,6 +81,7 @@ if res != 0:
print("Cannot proceed. Upgrade aborted.") print("Cannot proceed. Upgrade aborted.")
exit(res) exit(res)
# The new database gets created
cmd = f"psql -U {dbuser} -d template1 <{dbtemplate}" cmd = f"psql -U {dbuser} -d template1 <{dbtemplate}"
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
@@ -66,6 +91,7 @@ os.system(cmd)
#cmd = f"psql dougal -c 'DROP schema {schema} CASCADE;'" #cmd = f"psql dougal -c 'DROP schema {schema} CASCADE;'"
#print(cmd) #print(cmd)
# The surveys get re-created
for survey in configuration.surveys(include_archived=True): for survey in configuration.surveys(include_archived=True):
id = survey["id"].lower() id = survey["id"].lower()
name = survey["name"] name = survey["name"]
@@ -76,7 +102,17 @@ for survey in configuration.surveys(include_archived=True):
     print(cmd)
     os.system(cmd)
+#
+# Here is where we re-import the data
+#
+os.remove(LOCKFILE) # Let runner.sh run again
+# Import the bulk of the data
 print(runner)
 os.system(runner)
+# Import the data that lives only in the database
+print(system_load)
+os.system(system_load)
 print("Done")