1- """Ragas LLM Manager - Ragas-specific LLM wrapper that takes LiteLLM parameters ."""
1+ """Ragas LLM Manager - Ragas-specific LLM wrapper."""
22
33from typing import Any , Optional
44
5- import litellm
65from ragas .llms .base import BaseRagasLLM , Generation , LLMResult
76from ragas .metrics import answer_relevancy , faithfulness
87
8+ from lightspeed_evaluation .core .llm .custom import BaseCustomLLM
9+ from lightspeed_evaluation .core .system .exceptions import LLMError
910
class RagasCustomLLM(BaseRagasLLM, BaseCustomLLM):
    """Ragas-compatible LLM that delegates requests to BaseCustomLLM.

    Bridges the project's custom LLM layer into Ragas by implementing the
    BaseRagasLLM interface on top of BaseCustomLLM's call machinery.
    """

    def __init__(self, model_name: str, llm_params: dict[str, Any]):
        """Initialize both parent classes for the configured model.

        Args:
            model_name: Identifier of the underlying model.
            llm_params: Provider parameters (e.g. ``temperature``).
        """
        # The two bases take different constructor arguments, so each is
        # initialized explicitly rather than via a single super() chain.
        BaseRagasLLM.__init__(self)
        BaseCustomLLM.__init__(self, model_name, llm_params)
        print(f"✅ Ragas Custom LLM: {self.model_name}")
1920
2021 def generate_text ( # pylint: disable=too-many-arguments,too-many-positional-arguments
@@ -25,42 +26,38 @@ def generate_text( # pylint: disable=too-many-arguments,too-many-positional-arg
2526 stop : Optional [list [str ]] = None ,
2627 callbacks : Optional [Any ] = None ,
2728 ) -> LLMResult :
28- """Generate text using LiteLLM with provided parameters."""
29+ """Generate text using LLM with provided parameters."""
2930 prompt_text = str (prompt )
3031
3132 # Use temperature from params unless explicitly overridden
3233 temp = (
3334 temperature
3435 if temperature != 1e-08
35- else self .litellm_params .get ("temperature" , 0.0 )
36+ else self .llm_params .get ("temperature" , 0.0 )
3637 )
3738
3839 try :
39- response = litellm .completion (
40- model = self .model_name ,
41- messages = [{"role" : "user" , "content" : prompt_text }],
42- n = n ,
43- temperature = temp ,
44- max_tokens = self .litellm_params .get ("max_tokens" ),
45- timeout = self .litellm_params .get ("timeout" ),
46- num_retries = self .litellm_params .get ("num_retries" ),
40+ # Use inherited BaseCustomLLM functionality
41+ call_kwargs = {}
42+ if stop is not None :
43+ call_kwargs ["stop" ] = stop
44+
45+ responses = self .call (
46+ prompt_text , n = n , temperature = temp , return_single = False , ** call_kwargs
4747 )
4848
4949 # Convert to Ragas format
5050 generations = []
51- for choice in response .choices : # type: ignore
52- content = choice .message .content # type: ignore
53- if content is None :
54- content = ""
55- gen = Generation (text = content .strip ())
51+ for response_text in responses :
52+ gen = Generation (text = response_text )
5653 generations .append (gen )
5754
5855 result = LLMResult (generations = [generations ])
5956 return result
6057
6158 except Exception as e :
6259 print (f"❌ Ragas LLM failed: { e } " )
63- raise RuntimeError (f"Ragas LLM evaluation failed: { str (e )} " ) from e
60+ raise LLMError (f"Ragas LLM evaluation failed: { str (e )} " ) from e
6461
6562 async def agenerate_text ( # pylint: disable=too-many-arguments,too-many-positional-arguments
6663 self ,
@@ -87,11 +84,11 @@ class RagasLLMManager:
8784 This manager focuses solely on Ragas-specific LLM integration.
8885 """
8986
90- def __init__ (self , model_name : str , litellm_params : dict [str , Any ]):
87+ def __init__ (self , model_name : str , llm_params : dict [str , Any ]):
9188 """Initialize with LLM parameters from LLMManager."""
9289 self .model_name = model_name
93- self .litellm_params = litellm_params
94- self .custom_llm = RagasCustomLLM (model_name , litellm_params )
90+ self .llm_params = llm_params
91+ self .custom_llm = RagasCustomLLM (model_name , llm_params )
9592
9693 # Configure Ragas metrics to use our custom LLM
9794 answer_relevancy .llm = self .custom_llm
@@ -107,5 +104,5 @@ def get_model_info(self) -> dict[str, Any]:
107104 """Get information about the configured model."""
108105 return {
109106 "model_name" : self .model_name ,
110- "temperature" : self .litellm_params .get ("temperature" , 0.0 ),
107+ "temperature" : self .llm_params .get ("temperature" , 0.0 ),
111108 }