
Commit 6773ed7

Add strict flag to TokenIDE sheets
1 parent: 3353f58

File tree: 2 files changed (+24 -17 lines)

scripts/build.py (+1 -1)

@@ -28,4 +28,4 @@
 
 for model in "TI-82", "TI-83", "TI-83+", "TI-84+", "TI-84+CSE", "TI-84+CE":
     with open(f"built/tokenide/{model}.xml", "w+", encoding="UTF-8") as outfile:
-        outfile.write(sheet.with_tokens(version=OsVersion(model, "latest")).to_xml_string())
+        outfile.write(sheet.with_tokens(version=OsVersion(model, "latest"), strict=True).to_xml_string())
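For context, the call pattern after this change, shown for a single model outside the loop. This is a minimal sketch that assumes, as above, that sheet is an already-loaded TokenIDESheet and that OsVersion comes from the token-sheets tooling; the target model chosen here is arbitrary.

# Minimal sketch: export one model's TokenIDE sheet, keeping only the tokens
# that exist in the targeted OS version (strict=True). Assumes `sheet` and
# OsVersion as used in scripts/build.py above.
target = OsVersion("TI-83+", "latest")
strict_sheet = sheet.with_tokens(version=target, strict=True)

with open("built/tokenide/TI-83+.xml", "w+", encoding="UTF-8") as outfile:
    outfile.write(strict_sheet.to_xml_string())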

scripts/tokenide.py (+23 -16)

@@ -121,7 +121,8 @@ def build_page(element: ET.Element, byte: str, dct: dict):
         return sheet
 
     def with_tokens(self, *,
-                    version: OsVersion = None, tokens: Tokens = None, file=None, lang: str = 'en') -> 'TokenIDESheet':
+                    version: OsVersion = None, tokens: Tokens = None, file=None,
+                    lang: str = 'en', strict: bool = False) -> 'TokenIDESheet':
         """
         Constructs a copy of this sheet updated with the specified token data from the token sheets
 
@@ -132,10 +133,13 @@ def with_tokens(self, *,
         :param tokens: A Tokens container of tokens to add (defaults to all tokens)
         :param file: A file object to read tokens from (defaults to the 8X token sheet)
         :param lang: A language code (defaults to "en")
+        :param strict: Whether to remove tokens not present in the targeted version (default to False)
         :return: A TokenIDESheet containing the union of this sheet and the specified token data
         """
 
         sheet = self.sheet.copy()
+        if strict:
+            sheet["tokens"] = {}
 
         if tokens is None:
             if file is None:
@@ -156,38 +160,41 @@ def with_tokens(self, *,
 
             leading, trailing = byte[:1], byte[1:]
 
-            dct = sheet["tokens"]
-            value = f"${leading.hex().upper()}"
+            old = self.sheet.copy()["tokens"]
+            new = sheet["tokens"]
 
-            if value not in dct:
-                dct[value] = {"string": None, "variants": set(), "attrib": {}, "tokens": {}}
+            value = f"${leading.hex().upper()}"
+            if value not in new:
+                new[value] = old.get(value, {"string": None, "variants": set(), "attrib": {}, "tokens": {}})
+            if strict:
+                new[value]["tokens"] = {}
 
             if trailing:
-                dct = dct[value]["tokens"]
+                old = old[value]["tokens"]
+                new = new[value]["tokens"]
                 value = f"${trailing.hex().upper()}"
 
-                if value not in dct:
-                    dct[value] = {"string": None, "variants": set(), "attrib": {}, "tokens": {}}
+                new[value] = old.get(value, {"string": None, "variants": set(), "attrib": {}, "tokens": {}})
 
             translation = token.langs.get(lang, "en")
             display = translation.display
 
-            if dct[value]["string"] not in [*translation.names(), display]:
-                dct[value]["string"] = translation.accessible
+            if new[value]["string"] not in [*translation.names(), display]:
+                new[value]["string"] = translation.accessible
 
-            dct[value]["variants"] |= {name for name in translation.names() if all_names.count(name) == 1}
+            new[value]["variants"] |= {name for name in translation.names() if all_names.count(name) == 1}
 
-            string = dct[value]["string"]
+            string = new[value]["string"]
             if string not in display and display not in string and all_names.count(display) == 1:
-                dct[value]["variants"].add(display)
+                new[value]["variants"].add(display)
 
-            dct[value]["variants"] -= {string}
+            new[value]["variants"] -= {string}
 
             if byte in TokenIDESheet.STARTERS:
-                dct[value]["attrib"]["stringStarter"] = "true"
+                new[value]["attrib"]["stringStarter"] = "true"
 
             if byte in TokenIDESheet.TERMINATORS:
-                dct[value]["attrib"]["stringTerminator"] = "true"
+                new[value]["attrib"]["stringTerminator"] = "true"
 
         return TokenIDESheet(sheet)
 