dougal-software/bin/system_imports.py
#!/usr/bin/python3
"""
Re-import Dougal-exported data created by
system_exports.py
"""
import os
from glob import glob
import configuration
import preplots
from datastore import Datastore, psycopg2
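
# Tables written out by system_exports.py; each table's export file on
# disk is named after the table it came from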
exportables = [
    "events_seq",
    "events_seq_labels",
    "events_timed",
    "events_timed_labels"
]

if __name__ == '__main__':
    print("Reading configuration")
    surveys = configuration.surveys()
    print("Connecting to database")
    db = Datastore()
    print("Reading surveys")
    for survey in surveys:
        print(f'Survey: {survey["id"]} ({survey["schema"]})')
        db.set_survey(survey["schema"])
        with db.conn.cursor() as cursor:
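            # Import as a replica session so that triggers (including the
            # ones enforcing foreign keys) stay quiet during the bulk load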
cursor.execute("SET session_replication_role = replica;")
2020-09-02 15:04:04 +02:00
try:
2020-09-10 20:37:52 +02:00
pathPrefix = survey["exports"]["machine"]["path"]
except KeyError:
print("Survey does not define an export path for machine data")
2020-09-02 15:04:04 +02:00
continue
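            # Clear any previously imported rows first, then bulk-load each
            # export file into its table with COPY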
            try:
                for table in exportables:
                    path = os.path.join(pathPrefix, table)
                    if os.path.exists(path):
                        cursor.execute(f"DELETE FROM {table};")
                for table in exportables:
                    path = os.path.join(pathPrefix, table)
                    if not os.path.exists(path):
                        # No export file for this table; nothing to load
                        continue
                    print("", path, "→", table)
                    with open(path, "rb") as fd:
                        cursor.copy_from(fd, table)
            except psycopg2.errors.UniqueViolation:
                print("It looks like data for this survey may have already been imported (unique constraint violation)")
                # The failed COPY leaves the transaction aborted; roll back
                # so the statements below run in a fresh transaction
                db.conn.rollback()
2020-09-03 17:08:25 +02:00
# If we don't commit the data does not actually get copied
db.conn.commit()
cursor.execute("SET session_replication_role = DEFAULT;")
# Update the sequences that generate event ids
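            # (COPY inserts explicit id values without consuming sequence
            # numbers, so the serials must be advanced past the new rows)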
            cursor.execute("SELECT reset_events_serials();")
            # Let us ensure events_timed_seq is up to date, even though
            # the triggers will have taken care of this already.
            cursor.execute("CALL events_timed_seq_update_all();")

    print("Done")