
Commit abb2340
Merged in bgol/tradedangerous/csvexport (pull request #52)
Split up the actual export routine from the export sub-command
kfsone committed Dec 20, 2014
2 parents 3a636ce + 5c1f403 commit abb2340
Showing 2 changed files with 236 additions and 185 deletions.
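
For orientation: the change reduces the export sub-command to a thin wrapper around a new csvexport module (the second changed file, not shown in the hunks below). A minimal sketch of the resulting call pattern, built only from names that appear in the added lines; the wrapper function export_tables is illustrative and not part of the commit:

# Sketch only: exportTableToFile is provided by the new csvexport module
# added in this pull request; export_tables is a hypothetical wrapper.
from csvexport import exportTableToFile

def export_tables(tdb, cmdenv, tableNames, exportPath):
    for tableName in tableNames:
        if not cmdenv.quiet:
            print("Export Table '{table}'".format(table=tableName))
        # writes <exportPath>/<tableName>.csv and reports what happened
        lineCount, filePath = exportTableToFile(tdb, cmdenv, tableName, exportPath)
        if cmdenv.deleteEmpty and lineCount == 0:
            # drop files that ended up with no data rows
            filePath.unlink()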
204 changes: 19 additions & 185 deletions commands/export_cmd.py
@@ -1,43 +1,21 @@
from __future__ import absolute_import, with_statement, print_function, division, unicode_literals

from commands.parsing import MutuallyExclusiveGroup, ParseArgument
from commands.exceptions import CommandLineError
from pathlib import Path

import sqlite3
import csv

######################################################################
# TradeDangerous :: Commands :: Export
#
# Generate the CSV files for the master data of the database.
#
# Note: This command makes some assumptions about the structure
# of the database:
# * The table should only have one UNIQUE index
# * The referenced table must have one UNIQUE index
# * The FK columns must have the same name in both tables
# * One column primary keys will be handled by the database engine
#
######################################################################
# CAUTION: If the database structure gets changed this script might
# need some corrections.
######################################################################

######################################################################
# Default values

# for some tables the first two columns will be reversed
reverseList = [ 'AltItemNames',
                'Item',
                'ShipVendor',
                'Station',
                'StationBuying',
                'UpgradeVendor',
              ]

# some tables are ignored
ignoreList = [
]

######################################################################
# Parser config

@@ -82,66 +60,11 @@
),
]

######################################################################
# Helpers

def search_keyList(list, val):
    for row in list:
        if row['from'] == row['to'] == val: return row

def getUniqueIndex(conn, tableName):
    # return the first unique index
    idxCursor = conn.cursor()
    unqIndex = []
    for idxRow in idxCursor.execute("PRAGMA index_list('%s')" % tableName):
        if idxRow['unique']:
            # it's a unique index
            unqCursor = conn.cursor()
            for unqRow in unqCursor.execute("PRAGMA index_info('%s')" % idxRow['name']):
                unqIndex.append(unqRow['name'])
            return unqIndex
    return unqIndex

def getFKeyList(conn, tableName):
    # get all single column FKs
    keyList = []
    keyCount = -1
    keyCursor = conn.cursor()
    for keyRow in keyCursor.execute("PRAGMA foreign_key_list('%s')" % tableName):
        if keyRow['seq'] == 0:
            keyCount += 1
            keyList.append( {'table': keyRow['table'],
                             'from': keyRow['from'],
                             'to': keyRow['to']}
            )
        if keyRow['seq'] == 1:
            # if there is a second column, remove it from the list
            keyList.remove( keyList[keyCount] )
            keyCount -= 1

    return keyList

def buildFKeyStmt(conn, tableName, key):
    unqIndex = getUniqueIndex(conn, key['table'])
    keyList = getFKeyList(conn, key['table'])
    keyStmt = []
    for colName in unqIndex:
        # check if the column is a foreign key
        keyKey = search_keyList(keyList, colName)
        if keyKey:
            newStmt = buildFKeyStmt(conn, key['table'], keyKey)
            for row in newStmt:
                keyStmt.append(row)
        else:
            keyStmt.append( {'table': tableName, 'column': colName, 'joinTable': key['table'], 'joinColumn': key['to']} )

    return keyStmt

######################################################################
# Perform query and populate result set

def run(results, cmdenv, tdb):
    from tradedb import TradeDB
    from csvexport import exportTableToFile

    # check database exists
    if not tdb.dbPath.is_file():
@@ -150,18 +73,21 @@ def run(results, cmdenv, tdb):
    # check export path exists
    if cmdenv.path:
        # the "--path" overwrites the default path of TD
        exportDir = Path(cmdenv.path)
        exportPath = Path(cmdenv.path)
    else:
        exportDir = Path(cmdenv.dataDir)
    if not exportDir.is_dir():
        raise CommandLineError("Save location '{}' not found.".format(cmdenv.path))
        exportPath = Path(cmdenv.dataDir)
    if not exportPath.is_dir():
        raise CommandLineError("Save location '{}' not found.".format(str(exportPath)))

    # connect to the database
    if not cmdenv.quiet:
        print("Using database '{}'".format(tdb.dbFilename))
    conn = tdb.getDB()
    conn.row_factory = sqlite3.Row

    # some tables might be ignored
    ignoreList = []

    # extract tables from command line
    if cmdenv.tables:
        bindValues = cmdenv.tables.split(',')
@@ -171,13 +97,10 @@ def run(results, cmdenv, tdb):
        bindValues = []
        tableStmt = ''
        if not cmdenv.allTables:
            ignoreList.append("StationItem")
            ignoreList.append("StationBuying")
            ignoreList.append("StationSelling")

    # prefix for unique/ignore columns
    uniquePfx = "unq:"
    ignorePfx = "!"
            ignoreList += [ "StationItem",
                            "StationBuying",
                            "StationSelling",
                          ]

    tableCursor = conn.cursor()
    for row in tableCursor.execute("""
@@ -196,103 +119,14 @@
                print("Ignore Table '{table}'".format(table=tableName))
            continue

        # create CSV files
        exportPath = (exportDir / Path(tableName)).with_suffix(".csv")
        if not cmdenv.quiet:
            print("Export Table '{table}' to '{file}'".format(
                table=tableName, file=str(exportPath)
            ))

        lineCount = 0
        with exportPath.open("w", encoding='utf-8', newline="\n") as exportFile:
            exportOut = csv.writer(exportFile, delimiter=",", quotechar="'", doublequote=True, quoting=csv.QUOTE_NONNUMERIC, lineterminator="\n")

            cur = conn.cursor()

            # check for single PRIMARY KEY
            pkCount = 0
            for columnRow in cur.execute("PRAGMA table_info('%s')" % tableName):
                # count the columns of the primary key
                if columnRow['pk'] > 0: pkCount += 1

            # build column list
            columnList = []
            for columnRow in cur.execute("PRAGMA table_info('%s')" % tableName):
                # if there is only one PK column, ignore it
                if columnRow['pk'] > 0 and pkCount == 1: continue
                columnList.append(columnRow)
            print("Export Table '{table}'".format(table=tableName))

            # reverse the first two columns for some tables
            if tableName in reverseList:
                columnList[0], columnList[1] = columnList[1], columnList[0]

            # initialize helper lists
            csvHead = []
            stmtColumn = []
            stmtTable = [ tableName ]
            stmtOrder = []
            unqIndex = getUniqueIndex(conn, tableName)
            keyList = getFKeyList(conn, tableName)

            cmdenv.DEBUG0('UNIQUE: ' + ", ".join(unqIndex))

            # iterate over all columns of the table
            for col in columnList:
                # check if the column is a foreign key
                key = search_keyList(keyList, col['name'])
                if key:
                    # make the join statement
                    keyStmt = buildFKeyStmt(conn, tableName, key)
                    for keyRow in keyStmt:
                        if cmdenv.debug > 0:
                            print('FK-Stmt: {}'.format(keyRow))
                        # is the join for the same table
                        if keyRow['table'] == tableName:
                            csvPfx = ''
                            joinStmt = 'USING({})'.format(keyRow['joinColumn'])
                        else:
                            # this column must be ignored by the importer, it's only
                            # used to resolve the FK relation
                            csvPfx = ignorePfx
                            joinStmt = 'ON {}.{} = {}.{}'.format(keyRow['table'], keyRow['joinColumn'], keyRow['joinTable'], keyRow['joinColumn'])
                        if col['name'] in unqIndex:
                            # column is part of an unique index
                            csvPfx = uniquePfx + csvPfx
                        csvHead += [ "{}{}@{}.{}".format(csvPfx, keyRow['column'], keyRow['joinTable'], keyRow['joinColumn']) ]
                        stmtColumn += [ "{}.{}".format(keyRow['joinTable'], keyRow['column']) ]
                        if col['notnull']:
                            stmtTable += [ 'INNER JOIN {} {}'.format(keyRow['joinTable'], joinStmt) ]
                        else:
                            stmtTable += [ 'LEFT OUTER JOIN {} {}'.format(keyRow['joinTable'], joinStmt) ]
                        stmtOrder += [ "{}.{}".format(keyRow['joinTable'], keyRow['column']) ]
                else:
                    # ordinary column
                    if col['name'] in unqIndex:
                        # column is part of an unique index
                        csvHead += [ uniquePfx + col['name'] ]
                        stmtOrder += [ "{}.{}".format(tableName, col['name']) ]
                    else:
                        csvHead += [ col['name'] ]
                    stmtColumn += [ "{}.{}".format(tableName, col['name']) ]

            # build the SQL statement
            sqlStmt = "SELECT {} FROM {}".format(",".join(stmtColumn)," ".join(stmtTable))
            if len(stmtOrder) > 0:
                sqlStmt += " ORDER BY {}".format(",".join(stmtOrder))
            cmdenv.DEBUG0("SQL: %s" % sqlStmt)

            # finally generate the csv file
            # write header line without quotes
            exportFile.write("{}\n".format(",".join(csvHead)))
            for line in cur.execute(sqlStmt):
                lineCount += 1
                cmdenv.DEBUG2("{count}: {values}".format(count=lineCount, values=list(line)))
                exportOut.writerow(list(line))
            cmdenv.DEBUG1("{count} {table}s exported".format(count=lineCount, table=tableName))
        # create CSV files
        lineCount, filePath = exportTableToFile(tdb, cmdenv, tableName, exportPath)
        if cmdenv.deleteEmpty and lineCount == 0:
            # delete file if empty
            exportPath.unlink()
            if not cmdenv.quiet:
                print("Delete empty file '{file}'".format(file=exportPath))
            filePath.unlink()
            cmdenv.DEBUG0("Delete empty file '{file}'".format(file=filePath))

    return None
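
The body of exportTableToFile lives in the new csvexport.py, the other file in this commit, which is not shown above. Judging purely from the call site in run(), its contract is roughly the following; the signature and docstring here are an inference for readers, not the actual code from csvexport.py:

# Hypothetical interface, inferred only from the call in run() above.
def exportTableToFile(tdb, cmdenv, tableName, exportPath):
    """
    Export one table to <exportPath>/<tableName>.csv and return a
    (lineCount, filePath) tuple, so the caller can for example unlink
    the file again when cmdenv.deleteEmpty is set and no rows were written.
    """
    raise NotImplementedError("see csvexport.py introduced by this commit")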