41 lines
No EOL
1.3 KiB
Python
41 lines
No EOL
1.3 KiB
Python
import asyncio
|
|
import json
|
|
from modules.agentservice_agent_webcrawler import WebcrawlerAgent
|
|
import logging
|
|
|
|
# Logger configuration: INFO level, timestamped messages to stderr.
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO,
    handlers=[logging.StreamHandler()],
)
|
|
|
|
|
|
async def main():
    """Run a single web-analysis query through the WebcrawlerAgent and print the result."""
    crawler = WebcrawlerAgent()
    # get_web_query expects a message-context dict carrying the task under the "task" key.
    context = {
        "task": "Bitte führe eine Webanalyse durch, welche Firmen haben die gleichen Produkte wie ValueOn AG?"
    }
    response = await crawler.get_web_query(context)
    # The agent returns a plain string here, so no JSON decoding is needed.
    print(response)
|
|
|
|
|
|
async def main2():
    """Run a web query via WebcrawlerAgent, persist the results to JSON, and print them.

    Writes the raw results to ``test_result.json`` and, when the agent returns a
    list of result dicts, prints title/URL/snippet for each entry.
    """
    agent = WebcrawlerAgent()
    # Must be awaited — run_web_query is a coroutine.
    results = await agent.run_web_query(prompt="Welche Firmen haben die gleichen Produkte wie ValueOn AG?")
    # Fix: specify encoding explicitly (default is platform-dependent) and keep
    # non-ASCII characters (German umlauts) readable in the output file.
    with open('test_result.json', 'w', encoding='utf-8') as file:
        json.dump(results, file, ensure_ascii=False)
    # Print the results when the agent returned a structured list.
    # NOTE(review): assumes each entry is a dict with 'title', 'url' and
    # 'snippet' keys — confirm against WebcrawlerAgent's return contract.
    if isinstance(results, list):
        for i, result in enumerate(results, 1):
            print(f"\nResult {i}:")
            print(f"Title: {result['title']}")
            print(f"URL: {result['url']}")
            print(f"Snippet: {result['snippet']}")
|
|
|
|
|
|
|
|
# Script entry point: run the async main() via asyncio.
# NOTE(review): main2() is defined above but never invoked here.
if __name__ == "__main__":
    asyncio.run(main())