This example demonstrates multi-model routing: the agent is given a list of models and routes each step of the task to the most suitable one, handing off between them as needed.

import asyncio
from dedalus_labs import AsyncDedalus, DedalusRunner
from dotenv import load_dotenv

load_dotenv()

async def main():
    client = AsyncDedalus()
    runner = DedalusRunner(client)

    result = await runner.run(
        input="Find the year GPT-5 released, and handoff to Claude to write a haiku about Elon Musk.",
        model=["openai/gpt-4.1", "claude-3-5-sonnet-20241022"],  # models available for routing and handoff
        mcp_servers=["dedalus-labs/brave-search-mcp"],  # web search via MCP
        stream=False
    )

    print(result.final_output)

if __name__ == "__main__":
    asyncio.run(main())
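
For comparison, a single-model run uses the same call shape without the list. This is a minimal sketch under the assumption that `model` also accepts a single string when no handoff is needed; the model ID and `runner.run` parameters are the same ones used above.

import asyncio

from dedalus_labs import AsyncDedalus, DedalusRunner
from dotenv import load_dotenv

load_dotenv()

async def single_model():
    client = AsyncDedalus()
    runner = DedalusRunner(client)

    # Assumption: a single model string is accepted when routing is not required
    result = await runner.run(
        input="Write a haiku about Elon Musk.",
        model="claude-3-5-sonnet-20241022",
        stream=False
    )

    print(result.final_output)

if __name__ == "__main__":
    asyncio.run(single_model())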