forked from Jugendhackt/tokyo-metro-data
-
Notifications
You must be signed in to change notification settings - Fork 0
/
generator.py
292 lines (240 loc) · 10.3 KB
/
generator.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
import simplejson as json
import argparse
import signal
import sys
import os
from rich.table import Table
from rich.markup import escape
from rich.console import Console
from rich.progress import Progress, BarColumn, TextColumn
# Shared rich console used for all user-facing output below.
console = Console()

# Command-line interface.
# NOTE(review): the description "Webduino source builder" looks copied from an
# unrelated project — this script builds Tokyo-Metro station data; confirm.
parser = argparse.ArgumentParser(description="Webduino source builder")
parser.add_argument("-v", "--verbose", action='store_true', dest='verbose',
                    help="Displays verbose output")
parser.add_argument("-o", "--output", metavar="file", type=str,
                    default="./stations.json", dest='output',
                    help="location of the output file (default: ./stations.json)")
parser.add_argument("-m", "--matrix", action='store_true',
                    dest='matrix', default=False,
                    help="Create transition Matrices")
args = parser.parse_args()
def warn(text):
    """Print *text* to the shared console as a bold yellow warning."""
    prefix = "[bold yellow]WARNING:"
    body = "[bold white]" + text
    console.print(prefix, body)
def query_yes_no(question, default="yes"):
    """Ask a yes/no *question* on the console and return the answer as a bool.

    An empty reply selects *default* ("yes", "no" or None; None forces an
    explicit answer).  Any other *default* raises ValueError.  Loops until
    a recognised answer is given.
    """
    valid = {"yes": True, "y": True, "ye": True,
             "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)
    while True:
        # escape() keeps the literal [y/n] brackets from being read as markup.
        console.print(question + escape(prompt))
        choice = input().lower()
        if choice == '' and default is not None:
            return valid[default]
        if choice in valid:
            return valid[choice]
        console.print("Please respond with 'yes' or 'no' "
                      "(or 'y' or 'n').\n")
# Output locations for the optional transition matrices.
matrix_dist_file = "./matrix_dist.csv"
matrix_time_file = "./matrix_time.csv"

# Warn about every output file that already exists and would be overwritten,
# then ask a single confirmation question covering all of them.
ask_continue = False
# Ask for user confirmation if files exist
if os.path.exists(args.output):
    warn(args.output + " will be overridden!")
    ask_continue = True
if args.matrix and os.path.exists(matrix_dist_file):
    warn(matrix_dist_file + " will be overridden!")
    ask_continue = True
if args.matrix and os.path.exists(matrix_time_file):
    warn(matrix_time_file + " will be overridden!")
    ask_continue = True
if ask_continue:
    try:
        if not query_yes_no("Would you like to continue?"):
            sys.exit(0)
    except KeyboardInterrupt:
        # Ctrl+C during the prompt counts as "no".
        sys.exit(0)
# Abort handler: make Ctrl+C exit cleanly instead of dumping a
# KeyboardInterrupt traceback mid-run.
def signal_handler(sig, frame):
    """Exit with status 0 when SIGINT is received."""
    print('Aborting!')
    sys.exit(0)

signal.signal(signal.SIGINT, signal_handler)
# Generate Data: everything below runs inside one rich progress bar.
with Progress("[progress.percentage]{task.percentage:>3.0f}%",
              BarColumn(),
              "[progress.description]{task.description}") as progress:
    # Single task; `completed` is driven as a 0..1 fraction by update_progress.
    task1 = progress.add_task("Generating Data", total=1)
    step = 0

    # Provide funtion to update progress bar: advance one step and show
    # *text* as the label.  8 steps normally, 9 when matrices are generated.
    def update_progress(text):
        global step
        progress.tasks[task1].description = text
        progress.update(task1, completed=(step / (9 if args.matrix else 8)))
        progress.refresh()
        step = step + 1
# Parse CSV text and return a list of rows (each a list of string fields).
def parse_csv(text, sep=",", name="unknown file"):
    """Parse *text* as a simple separator-delimited file.

    Blank lines and lines starting with '#' are skipped; every remaining
    line is split on *sep*.  When --verbose is set, the parsed rows are
    also rendered as a rich table titled *name* (the progress bar is
    paused around the print so the table is not garbled).
    """
    stripped = [line.strip() for line in text.split("\n")]
    kept = [line for line in stripped if line and not line.startswith("#")]
    ret = [line.split(sep) for line in kept]
    if args.verbose:
        # default=0 keeps an empty / comment-only file from crashing max().
        widest = max((len(row) for row in ret), default=0)
        padding = 0 if widest > 10 else 1
        table = Table(title=name, show_header=False,
                      padding=(0, padding, 0, padding))
        for row in ret:
            table.add_row(*[str(field) for field in row])
        progress.stop()
        console.print(table, "\n\n\n")
        progress.start()
    return ret
# Makes station ids uniform: e.g. ("M", 1) and ("M", 01) both yield "M01".
def station_id(char, number):
    """Return the line letter plus the zero-padded two-digit station number."""
    pad = "0" if number < 10 else ""
    return "{}{}{}".format(char, pad, number)
# Load station names (english and japanese for use in add_connections).
# Both maps are keyed by the normalised station id.
update_progress("Loading stations (1/2 english)")
stations_eng = {}
with open("data/stations_eng.csv", encoding="utf8") as c_file:
    for line in parse_csv(c_file.read(), name="stations_eng.csv"):
        # columns: line letter, station number, english name
        stations_eng[station_id(line[0], int(line[1]))] = line[2]
update_progress("Loading stations (2/2 japanese)")
stations_jap = {}
with open("data/stations_jap.csv", encoding="utf8") as c_file:
    for line in parse_csv(c_file.read(), name="stations_jap.csv"):
        stations_jap[station_id(line[0], int(line[1]))] = line[2]

# Load transition types (numeric id -> label, for use in add_connections).
update_progress("Loading transition types")
default_type = 1  # type id used for ordinary travel along a line
types = {}
with open("data/types.csv", encoding="utf8") as c_file:
    for line in parse_csv(c_file.read(), name="types.csv"):
        types[int(line[0])] = line[1]
# Factory for a recorder that appends connections to *container*.
def add_connection(container):
    """Return a function that records one directed connection in *container*.

    The returned function(origin, destination, typ, dist, dur) creates the
    origin entry on first use (resolving its english/japanese names) and
    appends a connection dict to it; it returns *container*.

    NOTE(review): a station missing from the name maps gets the literal
    string "None" as its name — preserved as-is.
    """
    def inner(origin, destination, typ, dist, dur):
        entry = container.get(origin, {
            "name_en": str(stations_eng.get(origin)),
            "name_jp": str(stations_jap.get(origin)),
            "connections": []
        })
        entry["connections"].append({
            "target_id": destination,
            "type_id": typ,
            "type": types.get(typ),
            "duration": dur,
            "distance": dist
        })
        container[origin] = entry
        return container
    return inner
# Structure to hold the read information: station id -> entry dict
# (names + list of outgoing connections).
stations = {}
add_station = add_connection(stations)
# Factory for a recorder that stores line names in *container*.
def add_line(container):
    """Return a function that sets one name field of a line entry.

    The returned function(station, l_key, name) creates the entry with
    empty name fields on first use, then stores *name* under *l_key*
    (in practice "name_en" or "name_jp"); it returns *container*.
    """
    def inner(station, l_key, name):
        entry = container.get(station, {"name_en": "", "name_jp": ""})
        entry[l_key] = name
        container[station] = entry
        return container
    return inner
# Structure to hold the line information: line letter -> names dict.
lines = {}
# Rebinds the factory name to the recorder bound to `lines`.
add_line = add_line(lines)

# Load line names (english)
update_progress("Loading line names (1/2 english)")
with open("data/lines_eng.csv", encoding="utf8") as c_file:
    for line in parse_csv(c_file.read(), name="lines_eng.csv"):
        # columns: line letter, english name
        add_line(line[0], "name_en", line[1])
# Load line names (japanese)
update_progress("Loading line names (2/2 japanese)")
with open("data/lines_jap.csv", encoding="utf8") as c_file:
    for line in parse_csv(c_file.read(), name="lines_jap.csv"):
        add_line(line[0], "name_jp", line[1])
# Read lines.csv and append all along-the-line transitions to the map.
# Row layout: line letter, first station no., last station no., then
# (last-first) distances in km followed by (last-first) durations in s.
update_progress("Loading station transitions (1/2 lines.csv)")
with open("data/lines.csv", encoding="utf8") as c_file:
    for line in parse_csv(c_file.read(), name="lines.csv"):
        char = line[0]
        first = int(line[1])
        last = int(line[2])
        distances = list(map(float, line[3:3+last-first]))  # first n entries are distances in km
        durations = list(map(int, line[3+last-first:]))  # next n entries are durations in s
        # One segment per consecutive station pair, added in both directions.
        for station_number in range(first, last):
            distance = distances[station_number - first]
            duration = durations[station_number - first]
            station_a = station_id(char, station_number)
            station_b = station_id(char, station_number + 1)
            stations = add_station(station_a, station_b, default_type, distance, duration)
            stations = add_station(station_b, station_a, default_type, distance, duration)
# Read transitions.csv and append all inter-line transfers to the map.
# Row layout: letter1, no.1, letter2, no.2, type id, distance (km), duration (s).
# Fixed: this is the second of the two transition-loading steps, so the
# progress label is "(2/2 ...)" (it previously repeated "(1/2 ...)").
update_progress("Loading station transitions (2/2 transitions.csv)")
with open("data/transitions.csv", encoding="utf8") as c_file:
    for transition in parse_csv(c_file.read(), name="transitions.csv"):
        station1 = station_id(transition[0], int(transition[1]))
        station2 = station_id(transition[2], int(transition[3]))
        # renamed from `type` to avoid shadowing the builtin
        transition_type = int(transition[4])
        distance = float(transition[5])
        duration = int(transition[6])
        # Transfers work in both directions.
        stations = add_station(station1, station2, transition_type, distance, duration)
        stations = add_station(station2, station1, transition_type, distance, duration)
# Dump data as JSON: lines, stations (with connections) and type labels.
update_progress("Dumping data as JSON")
dump = {}
dump["lines"] = lines
dump["stations"] = stations
dump["transition_types"] = types
with open(args.output, "w+", encoding="utf8") as c_file:
    # ensure_ascii=False keeps the japanese names human-readable in the file.
    c_file.write(json.dumps(dump, indent=4, sort_keys=True, ensure_ascii=False))
# Dump data in Matrix format: station x station CSV matrices of distance
# and duration, with -1.0 marking "no direct connection".
if args.matrix:
    update_progress("Dumping data in matrix format... ")
    import pandas as pd
    import numpy as np
    # Station ids serve as both row and column labels.
    names = list(stations.keys())
    # Start with "no connection" everywhere.
    matrix_dist = pd.DataFrame(np.full((len(names), len(names)), -1.0),
                               columns=names, index=names)
    matrix_time = pd.DataFrame(np.full((len(names), len(names)), -1.0),
                               columns=names, index=names)
    # Fill the matrices with data.  Fixed: use .at (label-based scalar
    # assignment) instead of chained indexing (df[col][row] = v), which is
    # unreliable under pandas copy-on-write and removed in pandas 3.0.
    # Layout preserved: column = origin station, row = target station.
    for station in stations:
        for connection in stations[station]["connections"]:
            matrix_dist.at[connection["target_id"], station] = connection["distance"]
            matrix_time.at[connection["target_id"], station] = connection["duration"]
    # Verbose output: print both matrices with the progress bar paused.
    if args.verbose:
        progress.stop()
        console.print("\n\nTransition distance matrix:\n")
        console.print(matrix_dist)
        console.print("\n\nTransition time matrix:\n")
        console.print(matrix_time)
        progress.start()
    # Save matrix files
    with open(matrix_dist_file, "w+", encoding="utf8") as c_file:
        c_file.write(matrix_dist.to_csv(index=True))
    with open(matrix_time_file, "w+", encoding="utf8") as c_file:
        c_file.write(matrix_time.to_csv(index=True))

###
### Done
###
update_progress("Done")