import os

from dotenv import load_dotenv

from agno.agent import Agent
from agno.models.openrouter import OpenRouter
from agno.tools.duckduckgo import DuckDuckGoTools
from bindu.penguin.bindufy import bindufy

# Load environment variables (e.g. OPENROUTER_API_KEY) from a .env file.
load_dotenv()
# Web-search-capable weather agent backed by an OpenRouter-hosted model.
agent = Agent(
    instructions=(
        "You are a weather research assistant. When asked about weather, "
        "provide a clear, concise weather report with current conditions, "
        "temperature, and forecast. Focus on the most relevant information "
        "and present it in an organized, easy-to-read format. Avoid showing "
        "multiple search results - synthesize the information into a single "
        "coherent response."
    ),
    model=OpenRouter(
        id="openai/gpt-oss-120b",
        api_key=os.getenv("OPENROUTER_API_KEY"),
    ),
    tools=[DuckDuckGoTools()],
)
# bindu deployment configuration for the agent.
config = {
    "author": "[email protected]",
    "name": "weather_research_agent",
    "description": "Research agent that finds current weather and forecasts for any city worldwide",
    "deployment": {
        "url": "http://localhost:3773",
        "expose": True,
        "cors_origins": ["http://localhost:5173"],
    },
    "skills": ["skills/weather-research-skill"],
}
def handler(messages: list[dict[str, str]]):
    """Run the agent on the most recent message and return its reply as text."""
    if not messages:
        return "Please provide a location for weather information."

    # Pull the text out of the latest message, tolerating plain strings too.
    latest = messages[-1]
    latest_message = latest.get("content", "") if isinstance(latest, dict) else str(latest)

    result = agent.run(input=latest_message)

    # The run result usually carries the reply on .content; fall back if not.
    if hasattr(result, "content"):
        return result.content
    if hasattr(result, "response"):
        return result.response
    return str(result)
# Register the agent with bindu and serve it at the URL configured above.
bindufy(config, handler)

# Alternatively, launch=True also creates a tunnel to your agent and exposes
# it on port 3773:
# bindufy(config, handler, launch=True)
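# To smoke-test the handler directly, without going through the bindu server,
# you could temporarily comment out the bindufy() call above and run something
# like the following (the message payload shape mirrors what handler expects):
# print(handler([{"role": "user", "content": "What's the weather in Tokyo right now?"}]))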