package gogpt

import (
	"context"
	"errors"
	"net/http"
)

// Chat message roles defined by the OpenAI API.
const (
	ChatMessageRoleSystem    = "system"
	ChatMessageRoleUser      = "user"
	ChatMessageRoleAssistant = "assistant"
)

var (
	ErrChatCompletionInvalidModel = errors.New("currently, only gpt-3.5-turbo, gpt-3.5-turbo-0301, gpt-4, gpt-4-0314, gpt-4-32k, and gpt-4-32k-0314 are supported")
)

// ChatCompletionMessage represents a single message in a chat conversation.
type ChatCompletionMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`

	// This property isn't in the official documentation, but it is in
	// the documentation for the official Python library:
	// - https://github.com/gogpt/gogpt-python/blob/main/chatml.md
	// - https://github.com/gogpt/gogpt-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
	Name string `json:"name,omitempty"`
}

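// A minimal sketch of how a conversation is typically assembled from these
// messages: the system message is optional and, when present, usually comes
// first, followed by alternating user and assistant turns.
//
//	messages := []ChatCompletionMessage{
//		{Role: ChatMessageRoleSystem, Content: "You are a helpful assistant."},
//		{Role: ChatMessageRoleUser, Content: "What is the capital of France?"},
//	}
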
// ChatCompletionRequest represents a request structure for the chat completion API.
type ChatCompletionRequest struct {
	Model            string                  `json:"model"`
	Messages         []ChatCompletionMessage `json:"messages"`
	MaxTokens        int                     `json:"max_tokens,omitempty"`
	Temperature      float32                 `json:"temperature,omitempty"`
	TopP             float32                 `json:"top_p,omitempty"`
	N                int                     `json:"n,omitempty"`
	Stream           bool                    `json:"stream,omitempty"`
	Stop             []string                `json:"stop,omitempty"`
	PresencePenalty  float32                 `json:"presence_penalty,omitempty"`
	FrequencyPenalty float32                 `json:"frequency_penalty,omitempty"`
	LogitBias        map[string]int          `json:"logit_bias,omitempty"`
	User             string                  `json:"user,omitempty"`
}

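// A minimal sketch of populating a request, reusing the messages slice from
// the sketch above. GPT3Dot5Turbo is one of the model constants defined
// elsewhere in this package; the tuning fields are optional and their zero
// values are omitted from the JSON payload.
//
//	req := ChatCompletionRequest{
//		Model:       GPT3Dot5Turbo,
//		Messages:    messages,
//		MaxTokens:   256,
//		Temperature: 0.7,
//	}
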
// ChatCompletionChoice represents a single generated completion choice.
type ChatCompletionChoice struct {
	Index        int                   `json:"index"`
	Message      ChatCompletionMessage `json:"message"`
	FinishReason string                `json:"finish_reason"`
}

// ChatCompletionResponse represents a response structure for the chat completion API.
type ChatCompletionResponse struct {
	ID      string                 `json:"id"`
	Object  string                 `json:"object"`
	Created int64                  `json:"created"`
	Model   string                 `json:"model"`
	Choices []ChatCompletionChoice `json:"choices"`
	Usage   Usage                  `json:"usage"`
}

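// A minimal sketch of reading a response returned by CreateChatCompletion
// below. N defaults to 1 on the API side, so a successful call normally yields
// a single choice, but checking the slice length avoids a panic on an empty
// result (assumes fmt is imported by the caller).
//
//	if len(resp.Choices) > 0 {
//		fmt.Println(resp.Choices[0].Message.Content)
//		fmt.Println("finish reason:", resp.Choices[0].FinishReason)
//	}
//	fmt.Printf("usage: %+v\n", resp.Usage)
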
// CreateChatCompletion calls the chat completion API to create a completion
// for the provided chat messages.
func (c *Client) CreateChatCompletion(
	ctx context.Context,
	request ChatCompletionRequest,
) (response ChatCompletionResponse, err error) {
	model := request.Model
	switch model {
	case GPT3Dot5Turbo0301, GPT3Dot5Turbo, GPT4, GPT40314, GPT432K0314, GPT432K:
		// Supported chat model; continue with the request.
	default:
		err = ErrChatCompletionInvalidModel
		return
	}

	urlSuffix := "/chat/completions"
	req, err := c.requestBuilder.build(ctx, http.MethodPost, c.fullURL(urlSuffix), request)
	if err != nil {
		return
	}

	err = c.sendRequest(req, &response)
	return
}
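
// A minimal end-to-end sketch, reusing the req value from the sketch above and
// assuming the package's client constructor NewClient (defined elsewhere in
// this package) and an fmt import in the caller.
//
//	c := NewClient("your-api-token")
//	resp, err := c.CreateChatCompletion(context.Background(), req)
//	if err != nil {
//		// errors.Is(err, ErrChatCompletionInvalidModel) reports that the
//		// requested model is not a supported chat model.
//		return
//	}
//	fmt.Println(resp.Choices[0].Message.Content)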