
bugfix: Properly log and set metric to none by default in Astra DB Component (langflow-ai#4390)

* bugfix: Properly log and set metric to none by default

* Update Vector Store RAG.json

* Update astradb.py
erichare authored and diogocabral committed Nov 26, 2024
1 parent 3a44a6b commit 0d9e80c
Showing 2 changed files with 13 additions and 14 deletions.
23 changes: 11 additions & 12 deletions src/backend/base/langflow/components/vectorstores/astradb.py
@@ -2,7 +2,6 @@

import orjson
from astrapy.admin import parse_api_endpoint
-from loguru import logger

from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store
from langflow.helpers import docs_to_data
@@ -219,7 +218,7 @@ def insert_in_dict(self, build_config, field_name, new_parameters):

# Find the index of the key to insert after
idx = len(items)
-for i, (key, _value) in enumerate(items):
+for i, (key, _) in enumerate(items):
if key == field_name:
idx = i + 1
break
@@ -412,8 +411,8 @@ def build_vector_store(self, vectorize_options=None):
token=self.token,
api_endpoint=self.api_endpoint,
namespace=self.namespace or None,
-environment=parse_api_endpoint(self.api_endpoint).environment,
-metric=self.metric,
+environment=parse_api_endpoint(self.api_endpoint).environment if self.api_endpoint else None,
+metric=self.metric or None,
batch_size=self.batch_size or None,
bulk_insert_batch_concurrency=self.bulk_insert_batch_concurrency or None,
bulk_insert_overwrite_concurrency=self.bulk_insert_overwrite_concurrency or None,
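
The substance of this hunk is the falsy-to-None coercion: an unset form field reaches the component as an empty string, and "self.metric or None" (like the guarded parse_api_endpoint call on the endpoint) forwards None so the underlying store can apply its own default instead of receiving metric="". A minimal standalone sketch of that pattern, separate from the actual component code:

def normalize_option(value):
    # Map falsy form values ("" or 0) to None so the client library applies its own default.
    return value or None

assert normalize_option("") is None              # empty metric field: let AstraDB decide
assert normalize_option("cosine") == "cosine"    # an explicit choice is preserved
assert normalize_option(0) is None               # unset numeric options are dropped too
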
@@ -445,14 +444,14 @@ def _add_documents_to_vector_store(self, vector_store) -> None:
raise TypeError(msg)

if documents:
-logger.debug(f"Adding {len(documents)} documents to the Vector Store.")
+self.log(f"Adding {len(documents)} documents to the Vector Store.")
try:
vector_store.add_documents(documents)
except Exception as e:
msg = f"Error adding documents to AstraDBVectorStore: {e}"
raise ValueError(msg) from e
else:
-logger.debug("No documents to add to the Vector Store.")
+self.log("No documents to add to the Vector Store.")

def _map_search_type(self) -> str:
if self.search_type == "Similarity with score threshold":
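
The logging hunks here and below swap the module-level loguru logger for the component's own log() method, which is why the loguru import is dropped in the first hunk; messages can then surface with the component's run output instead of only in the server log. A rough sketch of the shape of that pattern, assuming nothing beyond what the diff shows (that the component exposes a log() method), with a print stand-in here:

class VectorStoreComponentSketch:
    def log(self, message: str) -> None:
        # Stand-in for the real base-class method, which records the message
        # with the component's run output.
        print(f"[component log] {message}")

    def add_documents(self, vector_store, documents) -> None:
        if documents:
            self.log(f"Adding {len(documents)} documents to the Vector Store.")
            vector_store.add_documents(documents)
        else:
            self.log("No documents to add to the Vector Store.")
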
@@ -477,9 +476,9 @@ def search_documents(self, vector_store=None) -> list[Data]:
if not vector_store:
vector_store = self.build_vector_store()

-logger.debug(f"Search input: {self.search_input}")
-logger.debug(f"Search type: {self.search_type}")
-logger.debug(f"Number of results: {self.number_of_results}")
+self.log(f"Search input: {self.search_input}")
+self.log(f"Search type: {self.search_type}")
+self.log(f"Number of results: {self.number_of_results}")

if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():
try:
@@ -491,13 +490,13 @@ def search_documents(self, vector_store=None) -> list[Data]:
msg = f"Error performing search in AstraDBVectorStore: {e}"
raise ValueError(msg) from e

-logger.debug(f"Retrieved documents: {len(docs)}")
+self.log(f"Retrieved documents: {len(docs)}")

data = docs_to_data(docs)
-logger.debug(f"Converted documents to data: {len(data)}")
+self.log(f"Converted documents to data: {len(data)}")
self.status = data
return data
-logger.debug("No search input provided. Skipping search.")
+self.log("No search input provided. Skipping search.")
return []

def get_retriever_kwargs(self):
4 changes: 2 additions & 2 deletions Vector Store RAG.json
@@ -889,7 +889,7 @@
"title_case": false,
"trace_as_metadata": true,
"type": "str",
-"value": ""
+"value": "cosine"
},
"namespace": {
"advanced": true,
@@ -2222,7 +2222,7 @@
"title_case": false,
"trace_as_metadata": true,
"type": "str",
-"value": ""
+"value": "cosine"
},
"namespace": {
"advanced": true,
