from openai import OpenAI
import lunary
# Point the OpenAI SDK at a local Ollama server, which exposes an
# OpenAI-compatible API under /v1/.
client = OpenAI(
    base_url='http://localhost:11434/v1/',  # replace with your Ollama base URL
    api_key='ollama',  # the SDK requires a key, but Ollama ignores it
)

# Instrument the client so Lunary records every LLM call made through it.
lunary.monitor(client)

# Single-turn chat request against the locally served model.
chat_completion = client.chat.completions.create(
    model='llama3.2',
    messages=[{'role': 'user', 'content': 'Say this is a test'}],
)