API Reference

Getting Started With the OpenAI SDK

This page will help you get started with the Create chat completion endpoint.

You can also use UniOne AI with OpenAI's client SDKs by pointing them at the UniOne base URL.

Python (official openai package):

from openai import OpenAI

# point the OpenAI client at the UniOne base URL
client = OpenAI(
    base_url="https://unione.ai/openai/v1",
    api_key="your-key",
)

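# create a streaming chat completion and print tokens as they arrive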
stream = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say this is a test"}],
    stream=True,
)
for chunk in stream:
    if chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="")


"""
This is a test.
"""
package main

import (
    "context"
    "errors"
    "fmt"
    "io"

    openai "github.com/sashabaranov/go-openai"
)

const token = "your-key"

func main() {
    conf := openai.DefaultConfig(token)
    conf.BaseURL = "https://unione.ai/openai/v1"
    client := openai.NewClientWithConfig(conf)
    
    // create chat completion stream
    stream, err := client.CreateChatCompletionStream(
        context.Background(),
        openai.ChatCompletionRequest{
            Model: openai.GPT3Dot5Turbo,
            Messages: []openai.ChatCompletionMessage{
                {
                    Role:    openai.ChatMessageRoleUser,
                    Content: "Hello!",
                },
            },
        },
    )
    if err != nil {
        fmt.Printf("ChatCompletion error: %v\n", err)
        return
    }
    defer stream.Close()

    for {
        msg, err := stream.Recv()
        if errors.Is(err, io.EOF) {
            return
        }
        if err != nil {
            fmt.Printf("ChatCompletion stream error: %v\n", err)
            return
        }

        // print the streamed delta content as it arrives
        if len(msg.Choices) == 0 {
            fmt.Println("no choices")
            return
        }
        fmt.Print(msg.Choices[0].Delta.Content)
    }
}

Currently, only streaming mode is supported in the chat completions API.
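
Under the hood, both SDKs make the same streaming HTTP request. The sketch below shows that raw call using Python's requests library for illustration; it assumes UniOne exposes the standard OpenAI-compatible /chat/completions path with server-sent-event ("data: ...") framing, so treat the path and framing as assumptions rather than confirmed details.

# Minimal sketch of the raw streaming request (assumes the standard
# OpenAI-compatible /chat/completions path and SSE "data: ..." framing).
import json
import requests

resp = requests.post(
    "https://unione.ai/openai/v1/chat/completions",
    headers={"Authorization": "Bearer your-key"},
    json={
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "Say this is a test"}],
        "stream": True,  # required: only streaming mode is supported
    },
    stream=True,
)
resp.raise_for_status()

for line in resp.iter_lines():
    # each SSE event arrives as a "data: {...}" line; "[DONE]" ends the stream
    if not line.startswith(b"data: "):
        continue
    payload = line[len(b"data: "):]
    if payload == b"[DONE]":
        break
    chunk = json.loads(payload)
    delta = chunk["choices"][0]["delta"].get("content")
    if delta is not None:
        print(delta, end="")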