The Tresor API is fully OpenAI-compatible. You only need to change two things in your existing code:
- Base URL: `https://api.trytresor.com/v1`
- API key: your Tresor key (starts with `tr-`)

pip install openai
npm install openai
go get github.com/sashabaranov/go-openai
from openai import OpenAI

# The stock OpenAI SDK works unchanged — only the endpoint and key differ.
tresor = OpenAI(
    base_url="https://api.trytresor.com/v1",
    api_key="YOUR_API_KEY",
)

# Ask for a streamed chat completion and echo each token as it arrives.
stream = tresor.chat.completions.create(
    model="gpt-oss-120b",
    stream=True,
    messages=[{"role": "user", "content": "Hello!"}],
)
for chunk in stream:
    token = chunk.choices[0].delta.content
    if token:
        print(token, end="")
import OpenAI from "openai";

// The stock OpenAI SDK works unchanged — only the endpoint and key differ.
const tresor = new OpenAI({
  baseURL: "https://api.trytresor.com/v1",
  apiKey: "YOUR_API_KEY",
});

// Ask for a streamed chat completion and echo each token as it arrives.
const completion = await tresor.chat.completions.create({
  model: "gpt-oss-120b",
  stream: true,
  messages: [{ role: "user", content: "Hello!" }],
});

for await (const chunk of completion) {
  const token = chunk.choices[0]?.delta?.content;
  if (token) process.stdout.write(token);
}
package main
import (
"context"
"fmt"
openai "github.com/sashabaranov/go-openai"
)
// main sends one (non-streaming) chat completion request through the
// Tresor endpoint using the go-openai client and prints the reply.
func main() {
	config := openai.DefaultConfig("YOUR_API_KEY")
	config.BaseURL = "https://api.trytresor.com/v1"
	c := openai.NewClientWithConfig(config)

	req := openai.ChatCompletionRequest{
		Model: "gpt-oss-120b",
		Messages: []openai.ChatCompletionMessage{
			{Role: "user", Content: "Hello!"},
		},
	}

	resp, err := c.CreateChatCompletion(context.Background(), req)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.Choices[0].Message.Content)
}