# Files
# temporal-ai-agent/run_ollama.py
# 2024-12-31 11:46:57 -08:00
#
# 21 lines
# 467 B
# Python
from ollama import chat, ChatResponse
def main():
    """Ask a local Ollama model a single question and print its answer.

    Sends one user turn to the ``mistral`` model via the Ollama chat API
    (requires a running Ollama server with the model pulled) and writes
    the assistant's text reply to stdout.
    """
    # Single-turn conversation payload expected by the chat endpoint.
    conversation = [
        {
            'role': 'user',
            'content': 'Why is the sky blue?',
        }
    ]
    # Issue the request to the locally running Ollama server.
    reply: ChatResponse = chat(model='mistral', messages=conversation)
    # Print only the assistant's message text, not the full response object.
    print(reply.message.content)
# Run the one-shot chat only when executed as a script, not on import.
if __name__ == '__main__':
    main()