Skip to content

Commit 96986a1

Browse files
jamescalam and zachschillaci27
authored and committed
added i_end in batch extraction (langchain-ai#907)
Fix for issue langchain-ai#906. Switches `[i : i + batch_size]` to `[i : i_end]` in the Pinecone `from_texts` method.
1 parent 04e95cf commit 96986a1

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

langchain/vectorstores/pinecone.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -198,17 +198,17 @@ def from_texts(
198198
# set end position of batch
199199
i_end = min(i + batch_size, len(texts))
200200
# get batch of texts and ids
201-
lines_batch = texts[i : i + batch_size]
201+
lines_batch = texts[i:i_end]
202202
# create ids if not provided
203203
if ids:
204-
ids_batch = ids[i : i + batch_size]
204+
ids_batch = ids[i:i_end]
205205
else:
206206
ids_batch = [str(uuid.uuid4()) for n in range(i, i_end)]
207207
# create embeddings
208208
embeds = embedding.embed_documents(lines_batch)
209209
# prep metadata and upsert batch
210210
if metadatas:
211-
metadata = metadatas[i : i + batch_size]
211+
metadata = metadatas[i:i_end]
212212
else:
213213
metadata = [{} for _ in range(i, i_end)]
214214
for j, line in enumerate(lines_batch):

0 commit comments

Comments
 (0)