Prerequisites:
- A Valid Qubrid AI Account logged in the platform
- Your account should have enough credits to ensure the requests get processed
- A valid API Key to be placed in the
YOUR_API_KEY section, generated using the steps mentioned in How to generate an API Key
Mistral 7B Instruct v0.3 - View Model
# Chat-completion example: Mistral 7B Instruct v0.3 on the Qubrid AI platform.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/chat/completions"
headers = {
    "Authorization": "Bearer YOUR_API_KEY",
    "Content-Type": "application/json",
}
payload = {
    "model": "mistralai/Mistral-7B-Instruct-v0.3",
    "messages": [{"role": "user", "content": "Explain quantum computing simply."}],
    "temperature": 0.7,
    "max_tokens": 500,
}
resp = requests.post(url, headers=headers, json=payload)
print(json.dumps(resp.json(), indent=2))
# Expected Output:
# {"choices": [{"message": {"content": "Quantum computing uses qubits..."}}]}
GPT-OSS 20B - View Model
# Chat-completion example: GPT-OSS 20B on the Qubrid AI platform.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/chat/completions"
headers = {
    "Authorization": "Bearer YOUR_API_KEY",
    "Content-Type": "application/json",
}
payload = {
    "model": "openai/gpt-oss-20b",
    "messages": [{"role": "user", "content": "Explain quantum computing simply."}],
    "temperature": 0.7,
    "max_tokens": 500,
}
resp = requests.post(url, headers=headers, json=payload)
print(json.dumps(resp.json(), indent=2))
# Expected Output:
# {"choices": [{"message": {"content": "Quantum computing uses qubits..."}}]}
IBM Granite 20B Code Instruct - View Model
# Chat-completion example: IBM Granite 20B Code Instruct on the Qubrid AI platform.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/chat/completions"
headers = {
    "Authorization": "Bearer YOUR_API_KEY",
    "Content-Type": "application/json",
}
payload = {
    "model": "ibm-granite/granite-20b-code-instruct-8k",
    "messages": [{"role": "user", "content": "Explain quantum computing simply."}],
    "temperature": 0.7,
    "max_tokens": 500,
}
resp = requests.post(url, headers=headers, json=payload)
print(json.dumps(resp.json(), indent=2))
# Expected Output:
# {}
Stable Code 3B - View Model
# Chat-completion example: Stable Code 3B on the Qubrid AI platform.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/chat/completions"
headers = {
    "Authorization": "Bearer YOUR_API_KEY",
    "Content-Type": "application/json",
}
payload = {
    "model": "stabilityai/stable-code-instruct-3b",
    "messages": [{"role": "user", "content": "Explain quantum computing simply."}],
    "temperature": 0.7,
    "max_tokens": 500,
}
resp = requests.post(url, headers=headers, json=payload)
print(json.dumps(resp.json(), indent=2))
# Expected Output:
# {}
Qwen Image Edit - View Model
# Image-generation example: Qwen Image Edit on the Qubrid AI platform.
# The endpoint returns a binary PNG image stream, so the response body is
# written to disk — calling response.json() on it (as the original did)
# would raise a JSONDecodeError.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/image/generation"
headers = {
    "Authorization": "Bearer YOUR_API_KEY",
    "Content-Type": "application/json",
}
data = {
    "model": "Qwen/Qwen-Image-Edit",
    "positive_prompt": "A futuristic city at sunset with flying cars",
    "width": 1024,   # output width in pixels
    "height": 1024,  # output height in pixels
    "steps": 30,     # diffusion steps
    "cfg": 7.5,      # classifier-free guidance scale
    "seed": 42       # fixed seed for reproducible output
}
response = requests.post(url, headers=headers, json=data)
# Fail loudly on auth/credit errors instead of saving an error page as a PNG.
response.raise_for_status()
with open("generated.png", "wb") as out:
    out.write(response.content)
# Expected Output:
# generated.png written to the current directory (binary PNG image stream)
Stable Diffusion - View Model
# Image-generation example: Stable Diffusion 3.5 Large on the Qubrid AI platform.
# The endpoint returns a binary PNG image stream, so the response body is
# written to disk — calling response.json() on it (as the original did)
# would raise a JSONDecodeError.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/image/generation"
headers = {
    "Authorization": "Bearer YOUR_API_KEY",
    "Content-Type": "application/json",
}
data = {
    "model": "stabilityai/stable-diffusion-3.5-large",
    "positive_prompt": "A futuristic city at sunset with flying cars",
    "width": 1024,   # output width in pixels
    "height": 1024,  # output height in pixels
    "steps": 30,     # diffusion steps
    "cfg": 7.5,      # classifier-free guidance scale
    "seed": 42       # fixed seed for reproducible output
}
response = requests.post(url, headers=headers, json=data)
# Fail loudly on auth/credit errors instead of saving an error page as a PNG.
response.raise_for_status()
with open("generated.png", "wb") as out:
    out.write(response.content)
# Expected Output:
# generated.png written to the current directory (binary PNG image stream)
Whisper Large V3 - View Model
# Audio-transcription example: Whisper Large V3 on the Qubrid AI platform.
# Note: no explicit Content-Type header — requests sets the correct
# multipart/form-data boundary itself for file uploads.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/audio/transcribe"
headers = {"Authorization": "Bearer YOUR_API_KEY"}
data = {"model": "openai/whisper-large-v3"}
# Use a context manager so the audio file handle is always closed
# (the original open("audio.wav", "rb") leaked the handle).
with open("audio.wav", "rb") as audio:
    files = {"file": audio}
    response = requests.post(url, headers=headers, files=files, data=data)
print(response.json())
# Expected Output:
# {"text": "Welcome to Qubrid AI", "language": "en"}
Qwen3-VL 8B Instruct - View Model
# Multimodal-chat example: Qwen3-VL 8B Instruct on the Qubrid AI platform.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/multimodal/chat"
headers = {
    "Authorization": "Bearer YOUR_API_KEY",
    "Content-Type": "application/json",
}
data = {
    "model": "Qwen/Qwen3-VL-8B-Instruct",
    "prompt": "Explain quantum computing simply.",
    "temperature": 0.7,  # sampling temperature
    "max_tokens": 500    # response length cap
}
response = requests.post(url, headers=headers, json=data)
print(json.dumps(response.json(), indent=2))
# Expected Output (the original comment showed an image-caption answer that
# did not match the text prompt above):
# {"choices": [{"message": {"content": "Quantum computing uses qubits..."}}]}
Qwen2.5-VL 7B Instruct - View Model
# Multimodal-chat example: Qwen2.5-VL 7B Instruct on the Qubrid AI platform.
import requests, json, base64

url = "https://platform.qubrid.com/api/v1/qubridai/multimodal/chat"
headers = {
    "Authorization": "Bearer YOUR_API_KEY",
    "Content-Type": "application/json",
}
data = {
    "model": "Qwen/Qwen2.5-VL-7B-Instruct",
    "prompt": "Explain quantum computing simply.",
    "temperature": 0.7,  # sampling temperature
    "max_tokens": 500    # response length cap
}
response = requests.post(url, headers=headers, json=data)
print(json.dumps(response.json(), indent=2))
# Expected Output (the original comment showed an image-caption answer that
# did not match the text prompt above):
# {"choices": [{"message": {"content": "Quantum computing uses qubits..."}}]}