Merged kfsone/tradedangerous into master
maddavo committed Dec 19, 2014
2 parents 19bdd77 + fa562eb · commit 7771c11
Showing 4 changed files with 60 additions and 28 deletions.
5 changes: 5 additions & 0 deletions CHANGES.txt
@@ -2,6 +2,11 @@
TradeDangerous, Copyright (C) Oliver "kfsone" Smith, July 2014
==============================================================================

v6.2.4 [wip]
. (kfsone) Fix for UTF-8 decoding error,
. (kfsone) Rebuild cache before .prices file after downloading .csvs
. (maddavo) Combat Stabilisers do exist

v6.2.3 Dec 17 2014
. (kfsone) "maddavo" import plugin:
. --opt=skipdl will use previous downloads
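The "Fix for UTF-8 decoding error" entry above corresponds to the transfers.py change at the bottom of this diff: downloaded chunks are now written to disk as raw bytes instead of being decoded to text first. A minimal, self-contained sketch of that pattern (the function name fetch_to_file is illustrative, not from the commit):

```python
from pathlib import Path
from urllib.request import urlopen

def fetch_to_file(url, localFile, chunkSize=4096):
    """Stream a URL to disk without decoding it.

    Writing in binary mode ("wb") and passing the raw chunk straight to
    fh.write() means byte sequences that are not valid UTF-8 can no longer
    raise UnicodeDecodeError mid-download, which is the failure this
    commit fixes.
    """
    tmpPath = Path(localFile + ".dl")
    with urlopen(url) as f, tmpPath.open("wb") as fh:
        while True:
            chunk = f.read(chunkSize)
            if not chunk:
                break
            fh.write(chunk)          # raw bytes: no chunk.decode()
    tmpPath.rename(localFile)        # swap the finished download into place
```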
2 changes: 1 addition & 1 deletion commands/import_cmd.py
@@ -20,7 +20,7 @@
)
name='import'
epilog=None
wantsTradeDB=True
wantsTradeDB=False
arguments = [
]
switches = [
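For context on the flag flipped above: the command front end is understood to check each command module's wantsTradeDB attribute to decide whether to construct the trade database before dispatching to the command, so setting it to False here reads as letting the import path (and the maddavo plugin) control when the cache gets built. That reading is an inference from this commit rather than something it states; the visible declarations with that inference as a comment:

```python
# commands/import_cmd.py, module-level metadata shown in the hunk above.
# The wantsTradeDB comment is an inference from this commit, not project docs.
name = 'import'
epilog = None
wantsTradeDB = False   # don't load/build the TradeDB before run() is called;
                       # the maddavo plugin decides if/when to rebuild the cache
```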
68 changes: 43 additions & 25 deletions plugins/maddavo_plug.py
@@ -18,9 +18,13 @@ class ImportPlugin(plugins.ImportPluginBase):
dateRe = re.compile(r"(\d\d\d\d-\d\d-\d\d)[ T](\d\d:\d\d:\d\d)")

options = {
'syscsv': "Also download System.csv from the site.",
'stncsv': "Also download Station.csv from the site.",
'skipdl': "Skip doing any downloads.",
'buildcache': "Forces a rebuild of the cache before processing "
"of the .prices file.",
'syscsv': "Also download System.csv from the site.",
'stncsv': "Also download Station.csv from the site.",
'skipdl': "Skip doing any downloads.",
'force': "Process prices even if timestamps suggest "
"there is no new data."
}


@@ -62,36 +66,52 @@ def save_timestamp(self, newestDate):


def run(self):
tdb, tdenv = self.tdb, self.tdenv

cacheNeedsRebuild = self.getOption("buildcache")
if not self.getOption("skipdl"):
cacheNeedsRebuild = False
if self.getOption("syscsv"):
transfers.download(
self.tdenv,
tdenv,
"http://www.davek.com.au/td/System.csv",
"data/System.csv",
backup=True,
)
cacheNeedsRebuild = True
if self.getOption("stncsv"):
transfers.download(
self.tdenv,
"http://www.davek.com.au/td/Station.csv",
"data/Station.csv",
backup=True,
)
try:
transfers.download(
tdenv,
"http://www.davek.com.au/td/Station.csv",
"data/Station.csv",
backup=True,
)
except transfers.HTTP404 as e:
if not tdenv.quiet:
print("Got HTTP 404 Error: Trying alternate URL...")
transfers.download(
tdenv,
"http://www.davek.com.au/td/station.asp",
"data/Station.csv",
backup=True,
)
cacheNeedsRebuild = True
# Download
transfers.download(
self.tdenv,
tdenv,
"http://www.davek.com.au/td/prices.asp",
self.filename,
)

if self.tdenv.download:
if tdenv.download:
if cacheNeedsRebuild:
print("NOTE: Did not rebuild cache")
return False

tdenv.ignoreUnknown = True

if cacheNeedsRebuild:
tdb = self.tdb
tdb = tdb
# Make sure we disconnect from the db
if tdb.conn:
tdb.conn.close()
@@ -146,11 +166,12 @@ def run(self):
))

if numNewLines == 0:
if not self.tdenv.quiet:
print("No new data - nothing to do - doing nothing.")
return False
if not tdenv.quiet:
print("Cache is up-to date / no new price entries.")
if not self.getOption("force"):
return False

if self.tdenv.detail:
if tdenv.detail:
print(
"Date of last import : {}\n"
"Timestamp of import : {}\n"
@@ -166,21 +187,18 @@ def run(self):
))

numStationsUpdated = len(updatedStations)
if not self.tdenv.quiet and numStationsUpdated:
if len(updatedStations) > 12 and self.tdenv.detail < 2:
if not tdenv.quiet and numStationsUpdated:
if len(updatedStations) > 12 and tdenv.detail < 2:
updatedStations = list(updatedStations)[:10] + ["..."]
print("{} {} updated:\n{}".format(
numStationsUpdated,
"stations" if numStationsUpdated > 1 else "station",
', '.join(updatedStations)
))

# Temporarily disable "ignoreUnkown"
mytdenv = tradeenv.TradeEnv(properties=self.tdenv)
mytdenv.ignoreUnknown = True
cache.importDataFromFile(
self.tdb,
mytdenv,
tdb,
tdenv,
pathlib.Path(self.filename),
)

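The new buildcache and force options interact with the existing skipdl flag and with the .csv downloads in run() above: --opt=buildcache always forces a cache rebuild before the .prices import, --opt=skipdl leaves the flag untouched (nothing is downloaded), and otherwise a fresh System.csv or Station.csv download marks the cache as stale; separately, --opt=force lets the import proceed even when no new price lines are found. A hypothetical helper (not part of the commit) condensing that gating logic:

```python
def needs_cache_rebuild(getOption, downloaded_csv):
    """Condensed restatement of the cacheNeedsRebuild logic in run().

    getOption is the plugin's option lookup; downloaded_csv says whether
    System.csv or Station.csv was requested and fetched this run.
    """
    cacheNeedsRebuild = getOption("buildcache")   # explicit override
    if not getOption("skipdl") and downloaded_csv:
        cacheNeedsRebuild = True                  # fresh .csvs invalidate the cache
    return cacheNeedsRebuild

# e.g. needs_cache_rebuild(plugin.getOption, downloaded_csv=True) -> True
```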
13 changes: 11 additions & 2 deletions transfers.py
@@ -12,6 +12,9 @@
######################################################################
# Helpers

class HTTP404(TradeException):
pass


def makeUnit(value):
"""
@@ -76,6 +79,12 @@ def download(
print("Connecting to server: {}".format(url))
try:
f = urlopen(req)
except urllib.error.HTTPError as e:
if e.code == 404:
raise HTTP404("{}: {}".format(e, url))
raise TradeException(
"HTTP Error: "+url+": "+str(e)
)
except urllib.error.URLError as e:
raise TradeException(
"Unable to connect ("+url+")\n"+str(e)
@@ -94,7 +103,7 @@ def download(
tmpPath = Path(localFile + ".dl")
actPath = Path(localFile)

with tmpPath.open("w") as fh:
with tmpPath.open("wb") as fh:
# Use the 'while True' approach so that we always print the
# download status including, especially, the 100% report.
while True:
@@ -118,7 +127,7 @@

chunk = f.read(chunkSize)
fetched += len(chunk)
print(chunk.decode(), file=fh, end="")
fh.write(chunk)

# Swap the file into place
if backup:
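Because HTTP404 subclasses TradeException, existing callers that catch TradeException keep working, while callers that care about a missing file specifically can catch the narrower type first, which is exactly what the maddavo plugin now does for Station.csv. A sketch of that caller-side pattern, assuming the download() signature shown in the diffs above (the helper name download_with_fallback is illustrative):

```python
from transfers import download, HTTP404

def download_with_fallback(tdenv, primaryUrl, fallbackUrl, localFile):
    try:
        download(tdenv, primaryUrl, localFile, backup=True)
    except HTTP404:
        # The file is missing on the server; retry the alternate URL.
        # Any other failure still surfaces as a TradeException.
        if not tdenv.quiet:
            print("Got HTTP 404 Error: Trying alternate URL...")
        download(tdenv, fallbackUrl, localFile, backup=True)
```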
