Skip to main content

Example Integration (OpenAI)

Integration for OpenAI or Azure OpenAI

Combining all the steps, here's how the integration looks for OpenAI:

from logspend_sdk.core import LogBuilder, LogSpendLogger
from openai import OpenAI

# Create the OpenAI client and the LogSpend logger.
client = OpenAI()
logger = LogSpendLogger(
    api_key="<YOUR_LOGSPEND_API_KEY>",
    project_id="<YOUR_LOGSPEND_PROJECT_ID>",
)

# The request payload sent to the Chat Completions API; the same dict
# is recorded by LogSpend as the logged input.
model_payload = {
    "model": "gpt-3.5-turbo",
    "messages": [
        {
            "role": "system",
            "content": "You are a helpful customer support assistant",
        },
        {
            "role": "user",
            "content": "Hello, can I get help with my booking",
        },
    ],
    "temperature": 0.7,
}

# Capture the input before the LLM call and the output after it,
# then attach identity metadata and ship the finished log entry.
builder = LogBuilder(model_payload)
completion = client.chat.completions.create(**model_payload)
builder.set_output(completion.model_dump_json())
builder.set_identity({"session_id": "session-123", "user_id": "253e-4741-a4fe"})
logger.send(builder.build())

Integration for other providers and models:

Combining all the steps, here's how the integration looks in a Python script:

import os
from logspend_sdk.core import LogBuilder, LogSpendLogger
from logspend_sdk.constants import Provider

# Fail fast when the API key is not configured in the environment.
api_key = os.environ.get('LOGSPEND_API_KEY')
if not api_key:
    raise ValueError("Missing LogSpend API Key. Ensure the LOGSPEND_API_KEY environment variable is set.")

# Initialize the LogSpend logger for your project.
logger = LogSpendLogger(api_key=api_key, project_id="<YOUR_LOGSPEND_PROJECT_ID>")

# Placeholder payloads — substitute the dictionaries shown earlier.
input_data = {...}  # As provided above
identity_data = {...}  # As provided above
custom_properties_data = {...}  # As provided above

# Record the input and any metadata BEFORE invoking the model.
builder = LogBuilder(input_data)
builder.set_identity(identity_data)
builder.set_custom_properties(custom_properties_data)

# Invoke your LLM provider (or a self-hosted model endpoint).
output_data = call_openai(input_data)

# Attach the model's response only once the call has returned,
# then build the record and send it to LogSpend.
builder.set_output(output_data)
logger.send(builder.build())