"""Example: stream a response from a locally served Ollama model via Agno."""

from typing import Iterator  # noqa

from agno.agent import Agent, RunOutputEvent  # noqa
from agno.models.ollama import Ollama

# Requires a local Ollama server with the `llama3.1:8b` model pulled.
agent = Agent(model=Ollama(id="llama3.1:8b"), markdown=True)

# Get the response in a variable
# run_response: Iterator[RunOutputEvent] = agent.run("Share a 2 sentence horror story", stream=True)
# for chunk in run_response:
#     print(chunk.content)

# Print the response in the terminal
agent.print_response("Share a 2 sentence horror story", stream=True)
For an easier setup that requires no local installation, you can use Ollama Cloud with your API key:
Copy
Ask AI
"""Example: stream a response from Ollama Cloud (no local server needed)."""

from agno.agent import Agent
from agno.models.ollama import Ollama

# No local setup required - just set OLLAMA_API_KEY
agent = Agent(model=Ollama(id="gpt-oss:120b-cloud", host="https://ollama.com"))

agent.print_response("Share a 2 sentence horror story", stream=True)