@@ -1,9 +1,10 @@
 from typing import Callable, Dict, List, Optional
-from tenacity import (
+
+from tenacity import (  # for exponential backoff
     retry,
     stop_after_attempt,
     wait_random_exponential,
-)  # for exponential backoff
+)
 
 from redisvl.vectorize.base import BaseVectorizer
 
@@ -30,7 +31,7 @@ def embed_many(
         texts: List[str],
         preprocess: Optional[Callable] = None,
         batch_size: Optional[int] = 10,
-        as_buffer: Optional[float] = False
+        as_buffer: Optional[float] = False,
     ) -> List[List[float]]:
         """Embed many chunks of texts using the OpenAI API.
 
@@ -50,7 +51,8 @@ def embed_many(
         for batch in self.batchify(texts, batch_size, preprocess):
             response = self._model_client.create(input=batch, engine=self._model)
             embeddings += [
-                self._process_embedding(r["embedding"], as_buffer) for r in response["data"]
+                self._process_embedding(r["embedding"], as_buffer)
+                for r in response["data"]
             ]
         return embeddings
 
@@ -59,7 +61,7 @@ def embed(
         self,
         text: str,
         preprocess: Optional[Callable] = None,
-        as_buffer: Optional[float] = False
+        as_buffer: Optional[float] = False,
     ) -> List[float]:
         """Embed a chunk of text using the OpenAI API.
 
@@ -84,7 +86,7 @@ async def aembed_many(
         texts: List[str],
         preprocess: Optional[Callable] = None,
         batch_size: int = 1000,
-        as_buffer: Optional[bool] = False
+        as_buffer: Optional[bool] = False,
     ) -> List[List[float]]:
         """Asynchronously embed many chunks of texts using the OpenAI API.
 
@@ -104,7 +106,8 @@ async def aembed_many(
         for batch in self.batchify(texts, batch_size, preprocess):
             response = await self._model_client.acreate(input=batch, engine=self._model)
             embeddings += [
-                self._process_embedding(r["embedding"], as_buffer) for r in response["data"]
+                self._process_embedding(r["embedding"], as_buffer)
+                for r in response["data"]
             ]
         return embeddings
 
@@ -113,7 +116,7 @@ async def aembed(
         self,
         text: str,
         preprocess: Optional[Callable] = None,
-        as_buffer: Optional[bool] = False
+        as_buffer: Optional[bool] = False,
     ) -> List[float]:
         """Asynchronously embed a chunk of text using the OpenAI API.
 
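For reference, a minimal usage sketch of the two methods touched above. The class name OpenAITextVectorizer, its import path, and the constructor arguments are assumptions added for illustration; only the embed()/embed_many() signatures and their parameters are confirmed by this diff.

    # Hypothetical usage; class name, import path, and constructor arguments
    # are assumptions -- only embed()/embed_many() and their parameters are
    # confirmed by the diff above.
    from redisvl.vectorize.text import OpenAITextVectorizer

    oai = OpenAITextVectorizer(
        model="text-embedding-ada-002",             # assumed model name
        api_config={"api_key": "YOUR_OPENAI_KEY"},  # assumed config shape
    )

    # One chunk of text -> one embedding (List[float]).
    vector = oai.embed("a single chunk of text")

    # Many chunks -> requests are sent in batches of `batch_size`;
    # as_buffer=True presumably returns packed byte buffers instead of float lists.
    vectors = oai.embed_many(
        ["first chunk", "second chunk", "third chunk"],
        batch_size=10,
        as_buffer=False,
    )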