Python Integration

Using requests

Setup

import requests

headers = {
    "Content-Type": "application/json",
    "X-API-KEY": "your_api_key"
}
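
To avoid hardcoding credentials, the key can also be read from an environment variable. This is a minimal sketch; the variable name MINTII_API_KEY is only an example, not something the API requires.

import os

# Read the API key from an environment variable (MINTII_API_KEY is an example name)
headers = {
    "Content-Type": "application/json",
    "X-API-KEY": os.environ["MINTII_API_KEY"]
}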

Router Endpoint

response = requests.post(
    "https://mintii-router-500540193826.us-central1.run.app/route/mintiiv0",
    headers=headers,
    json={"prompt": "Hi!"}
)

# Access response data
data = response.json()
print(data["message_content"])    # Model's response
print(data["model"])              # Model used
print(data["total_tokens_used"])  # Total tokens consumed
print(data["response_time"])      # Time taken to respond
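
Before parsing the body it is worth checking that the request succeeded; raise_for_status() is the standard requests idiom for this. A minimal sketch around the call above:

response = requests.post(
    "https://mintii-router-500540193826.us-central1.run.app/route/mintiiv0",
    headers=headers,
    json={"prompt": "Hi!"}
)
response.raise_for_status()  # Raise an HTTPError for 4xx/5xx responses
data = response.json()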

Single Model Endpoint

response = requests.post(
    "https://mintii-router-500540193826.us-central1.run.app/route/single_model",
    params={"provider": "groq", "model": "gemma-7b-it"},
    headers=headers,
    json={"prompt": "Hi!"}
)
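
The body can be read the same way as for the router endpoint; the sketch below assumes the single-model endpoint returns the same JSON fields (message_content, model, and so on).

# Assumes the same response fields as the router endpoint
data = response.json()
print(data["message_content"])  # Model's response
print(data["model"])            # Should echo the requested model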

Using the OpenAI Library

Setup

from openai import Client

client = Client(
    api_key="your_api_key",
    base_url="https://mintii-router-500540193826.us-central1.run.app"
)

Router Usage

response = client.chat.completions.create(
    messages=[{"role": "user", "content": "Hi!"}],
    model="mintiiv0"
)
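
Assuming the router returns a standard OpenAI-style chat completion object, the reply text can be read from the first choice:

# Read the reply, assuming an OpenAI-compatible chat completion response
print(response.choices[0].message.content)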

Single Model Usage

response = client.chat.completions.create(
    messages=[{"role": "user", "content": "Hi!"}],
    model="single_model",
    provider="groq",
    model_name="gemma-7b-it"
)
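
Note that recent versions of the OpenAI Python client reject keyword arguments it does not recognize, such as provider and model_name. If that happens, the extra fields can be forwarded with extra_body instead; this is a sketch under that assumption and has not been verified against the Mintii API.

# Sketch: forward provider/model selection via extra_body if the client
# rejects them as direct keyword arguments (assumption, not verified)
response = client.chat.completions.create(
    messages=[{"role": "user", "content": "Hi!"}],
    model="single_model",
    extra_body={"provider": "groq", "model_name": "gemma-7b-it"}
)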