dougal-software/bin/system_dump.py
D. Berge e3a3bdb153 Clean up whitespace.
Commands used:

find . -type f -name '*.js'| while read FILE; do if echo $FILE |grep -qv node_modules; then sed -ri 's/^\s+$//' "$FILE"; fi; done
find . -type f -name '*.vue'| while read FILE; do if echo $FILE |grep -qv node_modules; then sed -ri 's/^\s+$//' "$FILE"; fi; done
find . -type f -name '*.py'| while read FILE; do if echo $FILE |grep -qv node_modules; then sed -ri 's/^\s+$//' "$FILE"; fi; done
2022-04-29 14:48:21 +02:00

96 lines · 2.5 KiB · Python · Executable File

#!/usr/bin/python3
"""
Export data that is entered directly into Dougal
as opposed to being read from external sources.
This data will be read back in when the database
is recreated for an existing survey.
Unlike system_exports.py, which exports whole tables
via COPY, this exports a selection of columns from
tables containing both directly entered and imported
data.
"""
import os
from glob import glob
import configuration
import preplots
from datastore import Datastore

# Pull configuration values (e.g. VARDIR) into the module namespace.
locals().update(configuration.vars())

# Columns to dump for each table, per schema; None means the whole table is dumped.
exportables = {
"public": {
"projects": [ "meta" ],
"info": None,
"real_time_inputs": None
},
"survey": {
"final_lines": [ "remarks", "meta" ],
"final_shots": [ "meta" ],
"preplot_lines": [ "remarks", "ntba", "meta" ],
"preplot_points": [ "ntba", "meta" ],
"raw_lines": [ "remarks", "meta" ],
"raw_shots": [ "meta" ],
"planned_lines": None
}
}


def primary_key (table, cursor):
    """Return (column_name, data_type) pairs for the primary key of `table`."""
    # https://wiki.postgresql.org/wiki/Retrieve_primary_key_columns
    qry = """
        SELECT a.attname, format_type(a.atttypid, a.atttypmod) AS data_type
        FROM pg_index i
        JOIN pg_attribute a
          ON a.attrelid = i.indrelid
         AND a.attnum = ANY(i.indkey)
        WHERE i.indrelid = %s::regclass
          AND i.indisprimary;
    """
    cursor.execute(qry, (table,))
    return cursor.fetchall()

if __name__ == '__main__':
print("Reading configuration")
surveys = configuration.surveys()
print("Connecting to database")
db = Datastore()
db.connect()
for table in exportables["public"]:
with db.conn.cursor() as cursor:
pk = [ r[0] for r in primary_key(table, cursor) ]
columns = (pk + exportables["public"][table]) if exportables["public"][table] is not None else None
path = os.path.join(VARDIR, "-"+table)
with open(path, "wb") as fd:
print(" →→ ", path, " ←← ", table, columns)
cursor.copy_to(fd, table, columns=columns)
print("Reading surveys")
for survey in surveys:
print(f'Survey: {survey["id"]} ({survey["schema"]})')
db.set_survey(survey["schema"])
with db.conn.cursor() as cursor:
try:
pathPrefix = survey["exports"]["machine"]["path"]
except KeyError:
print("Survey does not define an export path for machine data")
continue
for table in exportables["survey"]:
pk = [ r[0] for r in primary_key(table, cursor) ]
columns = (pk + exportables["survey"][table]) if exportables["survey"][table] is not None else None
path = os.path.join(pathPrefix, "-"+table)
print(" →→ ", path, " ←← ", table, columns)
with open(path, "wb") as fd:
cursor.copy_to(fd, table, columns=columns)
print("Done")