-
-
Notifications
You must be signed in to change notification settings - Fork 1.9k
Description
Here is the example code I am using, with some minor adjustments. Bedrock will not instantiate with a temperature parameter, so I deleted that, and I went into the code for abstract_graph.py and deleted any default instantiation of a temperature parameter.
"""
Basic example of scraping pipeline using SmartScraper
"""
import os
from dotenv import load_dotenv
from scrapegraphai.graphs import SmartScraperGraph
from scrapegraphai.utils import prettify_exec_info
load_dotenv()
# ************************************************
# Define the configuration for the graph
# ************************************************
graph_config = {
"llm": {
"client": "bedrock",
"model": "bedrock/anthropic.claude-3-sonnet-20240229-v1:0"
}
}
# ************************************************
# Create the SmartScraperGraph instance and run it
# ************************************************
smart_scraper_graph = SmartScraperGraph(
prompt="List me all the projects with their description",
# also accepts a string with the already downloaded HTML code
source="https://perinim.github.io/projects/",
config=graph_config
)
result = smart_scraper_graph.run()
print(result)
# ************************************************
# Get graph execution info
# ************************************************
graph_exec_info = smart_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))
Here is the error I get, which I cannot resolve. I think it is an issue deep within the langchain library, but I am not sure whether there are any workarounds:
Traceback (most recent call last):
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_aws\llms\bedrock.py", line 715, in _prepare_input_and_invoke
response = self.client.invoke_model(**request_options)
^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: 'str' object has no attribute 'invoke_model'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\AnthonyHakim\test.py", line 72, in <module>
result = smart_scraper_graph.run()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\scrapegraphai\graphs\smart_scraper_graph.py", line 114, in run
self.final_state, self.execution_info = self.graph.execute(inputs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\scrapegraphai\graphs\base_graph.py", line 263, in execute
return self._execute_standard(initial_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\scrapegraphai\graphs\base_graph.py", line 184, in _execute_standard
raise e
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\scrapegraphai\graphs\base_graph.py", line 168, in _execute_standard
result = current_node.execute(state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\scrapegraphai\nodes\generate_answer_node.py", line 134, in execute
answer = chain.invoke({"question": user_prompt})
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_core\runnables\base.py", line 2878, in invoke
input = context.run(step.invoke, input, config)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_core\language_models\chat_models.py", line 276, in invoke
self.generate_prompt(
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_core\language_models\chat_models.py", line 776, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_core\language_models\chat_models.py", line 633, in generate
raise e
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_core\language_models\chat_models.py", line 623, in generate
self._generate_with_cache(
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_core\language_models\chat_models.py", line 845, in _generate_with_cache
result = self._generate(
^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_aws\chat_models\bedrock.py", line 552, in _generate
completion, tool_calls, llm_output = self._prepare_input_and_invoke(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\AnthonyHakim\anaconda3\envs\Lib\site-packages\langchain_aws\llms\bedrock.py", line 726, in _prepare_input_and_invoke
raise ValueError(f"Error raised by bedrock service: {e}")
ValueError: Error raised by bedrock service: 'str' object has no attribute 'invoke_model'
(I removed project names from paths)