
Commit c1628dd

[Bugfix] Fix delivered log issue in delivery cli (#2489)
1 parent 589c76f commit c1628dd

File tree

1 file changed: +13 −1 lines changed


python/mlc_llm/cli/delivery.py

Lines changed: 13 additions & 1 deletion
@@ -186,6 +186,18 @@ def _run_quantization(
     return succeeded


+def _get_current_log(log: str) -> ModelDeliveryList:
+    log_path = Path(log)
+    if not log_path.exists():
+        with log_path.open("w", encoding="utf-8") as o_f:
+            current_log = ModelDeliveryList(tasks=[])
+            json.dump(current_log.to_json(), o_f, indent=4)
+    else:
+        with log_path.open("r", encoding="utf-8") as i_f:
+            current_log = ModelDeliveryList.from_json(json.load(i_f))
+    return current_log
+
+
 def _main(  # pylint: disable=too-many-locals, too-many-arguments
     username: str,
     api: HfApi,
@@ -195,7 +207,7 @@ def _main(  # pylint: disable=too-many-locals, too-many-arguments
     output: str,
 ):
     failed_cases: List[Tuple[str, str]] = []
-    delivered_log = ModelDeliveryList(tasks=[])
+    delivered_log = _get_current_log(log)
     for task_index, task in enumerate(spec.tasks, 1):
         logger.info(
             bold("[{task_index}/{total_tasks}] Processing model: ").format(
