ValueError: Model provided by the configuration not supported
david-strejc opened this issue · comments
Traceback (most recent call last):
File "/home/david/Work/Programming/searchgraphai/first_try.py", line 18, in
search_graph = SearchGraph(
File "/home/david/.local/lib/python3.10/site-packages/scrapegraphai/graphs/search_graph.py", line 47, in init
super().init(prompt, config)
File "/home/david/.local/lib/python3.10/site-packages/scrapegraphai/graphs/abstract_graph.py", line 54, in init
self.graph = self._create_graph()
File "/home/david/.local/lib/python3.10/site-packages/scrapegraphai/graphs/search_graph.py", line 61, in _create_graph
smart_scraper_instance = SmartScraperGraph(
File "/home/david/.local/lib/python3.10/site-packages/scrapegraphai/graphs/smart_scraper_graph.py", line 47, in init
super().init(prompt, config, source)
File "/home/david/.local/lib/python3.10/site-packages/scrapegraphai/graphs/abstract_graph.py", line 50, in init
) if "embeddings" not in config else self._create_embedder(
File "/home/david/.local/lib/python3.10/site-packages/scrapegraphai/graphs/abstract_graph.py", line 304, in _create_embedder
raise ValueError(
ValueError: Model provided by the configuration not supported
david @ blackbox:Programming/searchgraphai $ cat first_try.py
"""Reproduction script: SearchGraph with a Groq LLM and a local Ollama embedder."""
from scrapegraphai.graphs import SearchGraph

# Graph configuration: Groq chat model plus Ollama embeddings.
graph_config = {
    "llm": {
        "model": "groq/llama3-8b-8192",
        "api_key": "",
        "temperature": 0
    },
    "embeddings": {
        "model": "ollama/nomic-embed-text",
        "base_url": "http://localhost:11434",  # set ollama URL arbitrarily
    },
    "max_results": 5,
}

# Build the SearchGraph and execute it in one pass.
search_graph = SearchGraph(
    prompt="List me all the traditional recipes from Chioggia",
    config=graph_config
)
print(search_graph.run())
Thank you, we will fix it as soon as possible.
Hi, please update — we have just published a new release.
closes with d4d913c
Can you tell me how to use the updated version? I just installed the scrapegraphai module with pip and I'm getting a similar error to the one above:
Traceback (most recent call last): File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/searchgraph.py", line 25, in <module> smart_scraper_graph = SearchGraph( ^^^^^^^^^^^^ File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/search_graph.py", line 43, in __init__ super().__init__(prompt, config) File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/abstract_graph.py", line 54, in __init__ self.graph = self._create_graph() ^^^^^^^^^^^^^^^^^^^^ File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/search_graph.py", line 57, in _create_graph smart_scraper_instance = SmartScraperGraph( ^^^^^^^^^^^^^^^^^^ File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/smart_scraper_graph.py", line 47, in __init__ super().__init__(prompt, config, source) File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/abstract_graph.py", line 50, in __init__ ) if "embeddings" not in config else self._create_embedder( ^^^^^^^^^^^^^^^^^^^^^^ File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/abstract_graph.py", line 287, in _create_embedder raise ValueError( ValueError: Model provided by the configuration not supported
Hey @ManikSinghSarmaal, can you copy-paste your code?
I'm also having the same issue and I just downloaded the last version today.
This happens when I'm using a loop to iterate through urls to scrape.
def get_ad(url):
    """Scrape a single page with SmartScraperGraph and print the extracted data.

    Parameters
    ----------
    url : str
        Page URL to scrape (a string with already-downloaded HTML is also
        accepted by SmartScraperGraph's ``source`` argument).

    NOTE(review): relies on a module-level ``graph_config`` — confirm it is
    defined before this function is called.
    """
    ad_scraper = SmartScraperGraph(
        prompt="Extract all relevant data in a structured JSON.",
        # also accepts a string with the already downloaded HTML code
        source=url,
        config=graph_config
    )
    ad = ad_scraper.run()
    # Only print non-empty results; an empty/None result is silently skipped.
    if ad:
        print(ad)


if __name__ == '__main__':
    # read_urls_from_json() is defined elsewhere; expected to return a list of
    # dicts with a 'url' key — TODO confirm against its definition.
    urls = read_urls_from_json()
    if urls:
        for url in urls:
            get_ad(url.get('url'))
    else:
        print("No URLs to process.")
The error also happens when using asyncio
async def run_blocking_code_in_thread(blocking_func, *args, **kwargs):
    """Run ``blocking_func(*args, **kwargs)`` in the shared executor without
    blocking the event loop, and return its result.

    NOTE(review): relies on a module-level ``executor`` — confirm it is a
    concurrent.futures executor created elsewhere.
    """
    from functools import partial  # local import: keeps the fix self-contained
    # get_running_loop() is the recommended call inside a coroutine
    # (get_event_loop() is deprecated there).
    loop = asyncio.get_running_loop()
    # run_in_executor() forwards *positional* arguments only; passing **kwargs
    # directly raises TypeError, so bind them with functools.partial first.
    return await loop.run_in_executor(executor, partial(blocking_func, *args, **kwargs))


async def get_ad_async(url):
    """Scrape one URL with SmartScraperGraph off-thread and log the JSON result."""
    ad_scraper = SmartScraperGraph(
        prompt="Extract all relevant data in a structured JSON.",
        source=url,
        config=graph_config
    )
    ad = await run_blocking_code_in_thread(ad_scraper.run)
    # Only log non-empty results.
    if ad:
        logger.info(json.dumps(ad, indent=4))


async def main():
    """Read the URL list and scrape every entry concurrently."""
    urls = await read_urls_from_json_async()
    if urls:
        tasks = [get_ad_async(url.get('url')) for url in urls]
        await asyncio.gather(*tasks)
    else:
        print("No URLs to process.")


if __name__ == '__main__':
    asyncio.run(main())
Hey @ManikSinghSarmaal, can you copy-paste your code?
Hi, the code that produces the above `ValueError: Model provided by the configuration not supported` is:
"""Run a SearchGraph against a local Ollama llama3 model and pretty-print the result."""
from scrapegraphai.graphs import SearchGraph
import nest_asyncio

# Patch the running event loop so SearchGraph can nest asyncio calls
# (needed e.g. inside notebooks).
nest_asyncio.apply()

# Configuration dictionary for the graph
graph_config = {
    "llm": {
        "model": "ollama/llama3",
        "temperature": 0,
        "format": "json",
        "base_url": "http://localhost:11434",
    },
    "embeddings": {
        "model": "ollama/nomic-embed-text",
        "base_url": "http://localhost:11434",  # Set the base URL for Ollama
    },
    "verbose": True,  # Enable verbose mode for debugging purposes
    "max_results": 5,
    'headless': False,
}

smart_scraper_graph = SearchGraph(
    # prompt="List all the content",
    prompt="Latest Tech News",
    config=graph_config
)

result = smart_scraper_graph.run()
print(result)

# Prettify the result and display the JSON
import json

output = json.dumps(result, indent=2)
line_list = output.split("\n")
for line in line_list:
    # Fixed: this loop body had lost its indentation in the original paste,
    # which made the script a SyntaxError.
    print(line)
And the error is:
Traceback (most recent call last): File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/Search.py", line 23, in <module> smart_scraper_graph = SearchGraph( ^^^^^^^^^^^^ File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/search_graph.py", line 43, in __init__ super().__init__(prompt, config) File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/abstract_graph.py", line 54, in __init__ self.graph = self._create_graph() ^^^^^^^^^^^^^^^^^^^^ File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/search_graph.py", line 57, in _create_graph smart_scraper_instance = SmartScraperGraph( ^^^^^^^^^^^^^^^^^^ File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/smart_scraper_graph.py", line 47, in __init__ super().__init__(prompt, config, source) File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/abstract_graph.py", line 50, in __init__ ) if "embeddings" not in config else self._create_embedder( ^^^^^^^^^^^^^^^^^^^^^^ File "/Users/maniksinghsarmaal/Downloads/scrapegraphai/ScrapegraphAIOllamallama3/scrapegraphAI/lib/python3.11/site-packages/scrapegraphai/graphs/abstract_graph.py", line 287, in _create_embedder raise ValueError( ValueError: Model provided by the configuration not supported