-
Notifications
You must be signed in to change notification settings - Fork 155
/
Copy pathembedding_adapters.py
211 lines (187 loc) · 7.39 KB
/
embedding_adapters.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
# embedding_adapters.py
# -*- coding: utf-8 -*-
import logging
import requests
import traceback
from typing import List
from langchain_openai import OpenAIEmbeddings, AzureOpenAIEmbeddings
def ensure_openai_base_url_has_v1(url: str) -> str:
    """
    Append '/v1' to *url* unless it already ends with a version segment
    (e.g. '/v1', '/v2') or contains '/v1' anywhere in the path.

    A blank or whitespace-only input is returned as the stripped string.
    """
    import re

    cleaned = url.strip()
    if not cleaned:
        return cleaned
    # Already versioned at the end (any /vN) -> leave untouched.
    if re.search(r'/v\d+$', cleaned):
        return cleaned
    # A '/v1' somewhere in the middle also counts as versioned.
    if '/v1' in cleaned:
        return cleaned
    return cleaned.rstrip('/') + '/v1'
class BaseEmbeddingAdapter:
    """
    Uniform interface shared by every embedding backend in this module.

    Concrete adapters must override both methods; invoking them on the
    base class raises NotImplementedError.
    """

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Return one embedding vector per input text."""
        raise NotImplementedError

    def embed_query(self, query: str) -> List[float]:
        """Return the embedding vector for a single query string."""
        raise NotImplementedError
class OpenAIEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Adapter backed by langchain's OpenAIEmbeddings client, usable with
    the official OpenAI API or any OpenAI-compatible endpoint.
    """

    def __init__(self, api_key: str, base_url: str, model_name: str):
        normalized_base = ensure_openai_base_url_has_v1(base_url)
        self._embedding = OpenAIEmbeddings(
            openai_api_key=api_key,
            openai_api_base=normalized_base,
            model=model_name,
        )

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Delegate batch embedding to the wrapped client."""
        return self._embedding.embed_documents(texts)

    def embed_query(self, query: str) -> List[float]:
        """Delegate single-query embedding to the wrapped client."""
        return self._embedding.embed_query(query)
class AzureOpenAIEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Adapter backed by langchain's AzureOpenAIEmbeddings.

    The full Azure embeddings URL passed as *base_url* is decomposed
    into endpoint, deployment name and api-version.
    """

    def __init__(self, api_key: str, base_url: str, model_name: str):
        import re

        # Expected shape:
        # https://<resource>/openai/deployments/<deployment>/embeddings?api-version=<ver>
        parsed = re.match(
            r'https://(.+?)/openai/deployments/(.+?)/embeddings\?api-version=(.+)',
            base_url,
        )
        if parsed is None:
            raise ValueError("Invalid Azure OpenAI base_url format")
        self.azure_endpoint = f"https://{parsed.group(1)}"
        self.azure_deployment = parsed.group(2)
        self.api_version = parsed.group(3)
        # model_name is not forwarded: Azure derives the model from the
        # deployment name embedded in the URL.
        self._embedding = AzureOpenAIEmbeddings(
            azure_endpoint=self.azure_endpoint,
            azure_deployment=self.azure_deployment,
            openai_api_key=api_key,
            api_version=self.api_version,
        )

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Delegate batch embedding to the wrapped client."""
        return self._embedding.embed_documents(texts)

    def embed_query(self, query: str) -> List[float]:
        """Delegate single-query embedding to the wrapped client."""
        return self._embedding.embed_query(query)
class OllamaEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Adapter for a local Ollama server.

    Embeddings are fetched from the /api/embeddings endpoint, one text
    per request (this endpoint has no batch mode here).
    """

    # Seconds before a request is abandoned; without a timeout,
    # requests.post can block forever on an unresponsive server.
    _REQUEST_TIMEOUT = 60

    def __init__(self, model_name: str, base_url: str):
        self.model_name = model_name
        self.base_url = base_url.rstrip("/")

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed each text individually; failed requests yield []."""
        return [self._embed_single(text) for text in texts]

    def embed_query(self, query: str) -> List[float]:
        """Embed a single query string."""
        return self._embed_single(query)

    def _resolve_url(self) -> str:
        """Normalize self.base_url into a full /api/embeddings URL."""
        url = self.base_url.rstrip("/")
        if "/api/embeddings" in url:
            return url
        if "/api" in url:
            return f"{url}/embeddings"
        # Strip an OpenAI-style '/v1' suffix before appending the Ollama path.
        if "/v1" in url:
            url = url[:url.index("/v1")]
        return f"{url}/api/embeddings"

    def _embed_single(self, text: str) -> List[float]:
        """
        POST one text to the Ollama /api/embeddings endpoint.

        :returns: the embedding vector, or [] when the HTTP request fails
                  (best-effort behaviour preserved from the original code).
        :raises ValueError: when the server responds without an
                            'embedding' field.
        """
        payload = {
            "model": self.model_name,
            "prompt": text
        }
        try:
            # timeout added so a hung local server cannot stall the caller.
            response = requests.post(self._resolve_url(), json=payload,
                                     timeout=self._REQUEST_TIMEOUT)
            response.raise_for_status()
            result = response.json()
            if "embedding" not in result:
                raise ValueError("No 'embedding' field in Ollama response.")
            return result["embedding"]
        except requests.exceptions.RequestException as e:
            logging.error(f"Ollama embeddings request error: {e}\n{traceback.format_exc()}")
            return []
class MLStudioEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Adapter for an ML Studio OpenAI-compatible embeddings endpoint,
    backed by langchain's OpenAIEmbeddings client.
    """

    def __init__(self, api_key: str, base_url: str, model_name: str):
        self._embedding = OpenAIEmbeddings(
            model=model_name,
            openai_api_key=api_key,
            openai_api_base=ensure_openai_base_url_has_v1(base_url),
        )

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Delegate batch embedding to the wrapped client."""
        return self._embedding.embed_documents(texts)

    def embed_query(self, query: str) -> List[float]:
        """Delegate single-query embedding to the wrapped client."""
        return self._embedding.embed_query(query)
class GeminiEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Embedding adapter for the Google Generative Language (Gemini) API,
    called directly over HTTP. Example endpoint:

    https://generativelanguage.googleapis.com/v1beta/models/text-embedding-004:embedContent?key=YOUR_API_KEY
    """

    # Seconds before a request is abandoned; without a timeout,
    # requests.post can block forever on an unresponsive endpoint.
    _REQUEST_TIMEOUT = 60

    def __init__(self, api_key: str, model_name: str, base_url: str):
        """
        :param api_key: Google API key.
        :param model_name: typically "text-embedding-004".
        :param base_url: e.g. https://generativelanguage.googleapis.com/v1beta/models
        """
        self.api_key = api_key
        self.model_name = model_name
        self.base_url = base_url.rstrip("/")

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed each text individually; failed requests yield []."""
        return [self._embed_single(text) for text in texts]

    def embed_query(self, query: str) -> List[float]:
        """Embed a single query string."""
        return self._embed_single(query)

    def _embed_single(self, text: str) -> List[float]:
        """
        POST one text to the Gemini :embedContent endpoint.

        :returns: the embedding vector, or [] on any request or parse
                  failure (best-effort behaviour preserved from the
                  original implementation).
        """
        # NOTE(review): the API key travels in the query string; consider
        # the 'x-goog-api-key' header so it is less likely to reach logs.
        url = f"{self.base_url}/{self.model_name}:embedContent?key={self.api_key}"
        payload = {
            "model": self.model_name,
            "content": {
                "parts": [
                    {"text": text}
                ]
            }
        }
        try:
            # Debug print of the raw response removed: it leaked every
            # response body to stdout. timeout added so the call cannot hang.
            response = requests.post(url, json=payload, timeout=self._REQUEST_TIMEOUT)
            response.raise_for_status()
            result = response.json()
            embedding_data = result.get("embedding", {})
            return embedding_data.get("values", [])
        except requests.exceptions.RequestException as e:
            logging.error(f"Gemini embed_content request error: {e}\n{traceback.format_exc()}")
            return []
        except Exception as e:
            logging.error(f"Gemini embed_content parse error: {e}\n{traceback.format_exc()}")
            return []
def create_embedding_adapter(
    interface_format: str,
    api_key: str,
    base_url: str,
    model_name: str
) -> BaseEmbeddingAdapter:
    """
    Factory: build the embedding adapter matching *interface_format*.

    The format string is compared case-insensitively after trimming
    whitespace. An unknown format raises ValueError.
    """
    builders = {
        "openai": lambda: OpenAIEmbeddingAdapter(api_key, base_url, model_name),
        "azure openai": lambda: AzureOpenAIEmbeddingAdapter(api_key, base_url, model_name),
        "ollama": lambda: OllamaEmbeddingAdapter(model_name, base_url),
        "ml studio": lambda: MLStudioEmbeddingAdapter(api_key, base_url, model_name),
        "gemini": lambda: GeminiEmbeddingAdapter(api_key, model_name, base_url),
    }
    fmt = interface_format.strip().lower()
    try:
        return builders[fmt]()
    except KeyError:
        raise ValueError(f"Unknown embedding interface_format: {interface_format}") from None