Skip to content

Commit 3d0e47f

Browse files
authored
Fix issue NVIDIA#43 (empty file creation) and improve reading/writing performance
This commit fixes issue NVIDIA#43 (empty files created when invoking the reshard_jsonl method in nemo_curator/utils/file_utils.py) by double-checking the file sizes after generation and deleting any files whose size is zero. In addition, I noticed there is no need to parse the content of the individual lines into JSON objects, since it should already be in JSON format. Removing that extra parsing yields a significant speed-up in the execution of this method. Signed-off-by: Miguel Martínez <[email protected]>
1 parent f4355af commit 3d0e47f

File tree

1 file changed

+11
-4
lines changed

1 file changed

+11
-4
lines changed

nemo_curator/utils/file_utils.py

+11-4
Original file line numberDiff line numberDiff line change
@@ -182,8 +182,7 @@ def _save_jsonl(documents, output_path, start_index=0, max_index=10000, prefix=N
182182
"""Worker function to write out the data to jsonl files"""
183183

184184
def _output_json(document):
185-
myjson = json.dumps(document, ensure_ascii=False)
186-
return myjson.encode("utf-8")
185+
return document.strip().encode('utf-8')
187186

188187
def _name(start_index, npad, prefix, i):
189188
tag = str(start_index + i).rjust(npad, "0")
@@ -195,11 +194,19 @@ def _name(start_index, npad, prefix, i):
195194

196195
output_glob_string = os.path.join(output_path, "*.jsonl")
197196

198-
documents.map(_output_json).to_textfiles(
197+
output_files = documents.map(_output_json).to_textfiles(
199198
output_glob_string,
200199
name_function=name,
201200
)
202201

202+
# Delete empty files generated due to empty partitions in the bag
203+
for output_file in output_files:
204+
try:
205+
if os.path.getsize(output_file) == 0:
206+
os.remove(output_file)
207+
except Exception as exception:
208+
print(f"An exception occurred when trying to delete {output_file}.\n{exception}", flush=True)
209+
203210

204211
def reshard_jsonl(
205212
input_dir, output_dir, output_file_size="100M", start_index=0, file_prefix=""
@@ -222,7 +229,7 @@ def reshard_jsonl(
222229
input_files = list(get_all_files_paths_under(input_dir))
223230

224231
# Read in the dask bag
225-
b = db.read_text(input_files, blocksize=blocksize).map(json.loads)
232+
b = db.read_text(input_files, blocksize=blocksize)
226233

227234
# Prepare the output
228235
output_dir = expand_outdir_and_mkdir(output_dir)

0 commit comments

Comments (0)