from literalai import LiteralClient
from openai import OpenAI
client = OpenAI(
    api_key="anything",             # litellm proxy virtual key
    base_url="http://0.0.0.0:4000"  # litellm proxy base_url
)
literalai_client = LiteralClient(api_key="")
# Instrument the OpenAI client
literalai_client.instrument_openai()
settings = {
    "model": "gpt-4o-mini",  # model you want the LiteLLM proxy to route to
    "temperature": 0,
    # ... more settings
}
response = client.chat.completions.create(
    messages=[
        {
            "role": "system",
            "content": "You are a helpful bot, you always reply in Spanish"
        },
        {
            "role": "user",
            "content": message.content  # incoming user message from your application
        }
    ],
    **settings
)
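
# A minimal sketch of reading the result, assuming the call above succeeds:
# the reply text lives in the standard OpenAI chat-completions response shape.
print(response.choices[0].message.content)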