2020-09-02 15:04:54 +02:00
|
|
|
|
#!/usr/bin/python3

import os
import sys

# Because Python 🙄
#
# Make the project's bin/ directory importable so that the shared
# configuration module living next to the other tooling can be loaded.
bin_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "bin"))
sys.path.insert(1, bin_path)

import configuration
|
|
|
|
|
|
|
|
|
|
|
|
# Warn the operator before doing anything destructive.  The \u001b
# escapes are ANSI colour codes (red underline, then yellow).
banner = """
\u001b[31;4mYou are about to upgrade the database to the latest schema in /etc/db/schema-template.sql.\u001b[0m

\u001b[33mThis will cause all existing surveys to be wiped out and re-created.

Data which is captured by the database itself, e.g., event data will be exported
and re-imported. The rest should be brought back in at the next bin/runner.sh
run.

\u001b[0m

If this is what you want to do, press ENTER, else press Ctrl+C to cancel.
"""
print(banner)

# Wait for explicit confirmation; Ctrl+C (KeyboardInterrupt) aborts here.
input()
|
|
|
|
|
|
|
2020-09-27 19:36:28 +02:00
|
|
|
|
#
# If we got here it's because the user has asked us to proceed
#

# Pull the shared configuration values (LOCKFILE among them) into the
# module namespace.  At module level locals() is globals(); say so
# explicitly instead of relying on that quirk.
globals().update(configuration.vars())

# Refuse to run while the runner is (or appears to be) active.
if os.path.exists(LOCKFILE):
    print(f"Dougal's runner is currently active. Retry in a few moments or remove {LOCKFILE} if you think it's stale")
    # sys.exit instead of exit(): the latter is injected by the `site`
    # module and is not guaranteed to exist in every interpreter setup.
    sys.exit(1)
|
|
|
|
|
|
|
|
|
|
|
|
# Let's stop runner.sh from messing with our upgrade.  The lock file
# holds our PID, which helps with staleness checks.
with open(LOCKFILE, "w") as lock_fd:
    lock_fd.write(f"{os.getpid()}")
|
|
|
|
|
|
|
|
|
|
|
|
#
# Define some variables that we will need later on
#

# Database connection defaults; honour the standard libpq environment
# variables (PGDATABASE / PGUSER) when they are set.
dbname = os.environ.get("PGDATABASE", "dougal")
dbuser = os.environ.get("PGUSER", "postgres")
|
|
|
|
|
|
# Paths to the helper scripts and the SQL template, all resolved
# relative to the project's bin/ directory.
dbtemplate = os.path.abspath(
    os.path.join(bin_path, "..", "etc", "db", "database-template.sql")
)

system_exports = os.path.join(bin_path, "system_exports.py")
system_dump = os.path.join(bin_path, "system_dump.py")
system_load = os.path.join(bin_path, "system_load.py")
create_survey = os.path.join(bin_path, "create_survey.sh")

# The runner is invoked with env vars that suppress exports and force imports.
runner = f"RUNNER_NOEXPORT=true RUNNER_IMPORT=true {os.path.join(bin_path, 'runner.sh')}"
|
|
|
|
|
|
|
2020-09-27 19:36:28 +02:00
|
|
|
|
#
# Export data
#

# Run each export helper in turn: first the usual exports, then the
# exports derived from “mixed” data.  Each command is echoed before it
# runs so the operator can follow along.
for export_cmd in (system_exports, system_dump):
    print(export_cmd)
    os.system(export_cmd)
|
|
|
|
|
|
|
|
|
|
|
|
#
# Do database stuff – this is where we start breaking things
#

# Everyone gets kicked out: terminate every other backend connected to
# the target database so the DROP below is not blocked.
print(f"Terminate all sessions in {dbname}")

cmd = f"psql -U {dbuser} -d {dbname} -c 'SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = current_database() AND pid <> pg_backend_pid();'"
print(cmd)
# The return status was previously bound to `res` but never inspected
# before being reassigned by the DROP step below, so the binding is dropped.
os.system(cmd)
|
|
|
|
|
|
|
2020-09-27 19:36:28 +02:00
|
|
|
|
# The database gets dropped
cmd = f"psql -U {dbuser} -d template1 -c 'DROP DATABASE {dbname};'"
print(cmd)
res = os.system(cmd)

# os.system returns the shell's exit status; non-zero means the DROP
# failed (e.g. the database is still in use).  Carrying on would wreck
# a database we could not drop cleanly, so stop here.
# NOTE(review): the lock file written earlier is left behind on this
# abort path — confirm that is intended.
if res != 0:
    print("Cannot proceed. Upgrade aborted.")
    # sys.exit instead of the site-provided exit(), which may be absent.
    sys.exit(res)
|
|
|
|
|
|
|
2020-09-27 19:36:28 +02:00
|
|
|
|
# The new database gets created: feed the SQL template to psql on
# template1 (presumably the template contains the CREATE DATABASE
# statement itself — it is redirected as psql's stdin).
recreate_cmd = f"psql -U {dbuser} -d template1 <{dbtemplate}"
print(recreate_cmd)
os.system(recreate_cmd)
|
|
|
|
|
|
|
|
|
|
|
|
#for survey in configuration.surveys():
|
|
|
|
|
|
#schema = survey["schema"]
|
|
|
|
|
|
#cmd = f"psql dougal -c 'DROP schema {schema} CASCADE;'"
|
|
|
|
|
|
#print(cmd)
|
|
|
|
|
|
|
2020-09-27 19:36:28 +02:00
|
|
|
|
# The surveys get re-created, including archived ones.
for survey in configuration.surveys(include_archived=True):
    survey_id = survey["id"].lower()  # renamed: `id` shadowed the builtin id()
    name = survey["name"]
    epsg = survey["epsg"]
    schema = survey["schema"]

    # NOTE(review): values are interpolated into a shell command; a
    # survey name containing quotes or backticks would break or inject
    # into this command — confirm survey config is trusted input.
    cmd = f"SCHEMA_NAME={schema} {create_survey} \"{survey_id}\" \"{name}\" {epsg}"
    print(cmd)
    os.system(cmd)
|
|
|
|
|
|
|
2020-09-27 19:36:28 +02:00
|
|
|
|
#
# Here is where we re-import the data
#

# Remove the lock so runner.sh is allowed to run again.
os.remove(LOCKFILE)

# Import the bulk of the data.
print(runner)
os.system(runner)

# Import the data that lives only in the database.
print(system_load)
os.system(system_load)

print("Done")
|