@@ -241,18 +241,20 @@ type CompletionRequest struct {
 	LogitBias map[string]int `json:"logit_bias,omitempty"`
 	// Store can be set to true to store the output of this completion request for use in distillations and evals.
 	// https://platform.openai.com/docs/api-reference/chat/create#chat-create-store
-	Store           bool     `json:"store,omitempty"`
-	LogProbs        int      `json:"logprobs,omitempty"`
-	MaxTokens       int      `json:"max_tokens,omitempty"`
-	N               int      `json:"n,omitempty"`
-	PresencePenalty float32  `json:"presence_penalty,omitempty"`
-	Seed            *int     `json:"seed,omitempty"`
-	Stop            []string `json:"stop,omitempty"`
-	Stream          bool     `json:"stream,omitempty"`
-	Suffix          string   `json:"suffix,omitempty"`
-	Temperature     float32  `json:"temperature,omitempty"`
-	TopP            float32  `json:"top_p,omitempty"`
-	User            string   `json:"user,omitempty"`
+	Store bool `json:"store,omitempty"`
+	// Metadata to store with the completion.
+	Metadata        map[string]string `json:"metadata,omitempty"`
+	LogProbs        int               `json:"logprobs,omitempty"`
+	MaxTokens       int               `json:"max_tokens,omitempty"`
+	N               int               `json:"n,omitempty"`
+	PresencePenalty float32           `json:"presence_penalty,omitempty"`
+	Seed            *int              `json:"seed,omitempty"`
+	Stop            []string          `json:"stop,omitempty"`
+	Stream          bool              `json:"stream,omitempty"`
+	Suffix          string            `json:"suffix,omitempty"`
+	Temperature     float32           `json:"temperature,omitempty"`
+	TopP            float32           `json:"top_p,omitempty"`
+	User            string            `json:"user,omitempty"`
 }
 
 // CompletionChoice represents one of possible completions.
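A minimal usage sketch for the new field (not part of this diff): it builds a CompletionRequest with Store enabled and the new Metadata map attached. The client calls (openai.NewClient, Client.CreateCompletion), the import path, the Model constant, the Prompt field, and the metadata keys are assumptions for illustration only; the diff itself only confirms the Store and Metadata fields on CompletionRequest.

// Sketch only: assumes the github.com/sashabaranov/go-openai module and its
// completion client API; only the Store and Metadata fields come from this diff.
package main

import (
	"context"
	"fmt"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	req := openai.CompletionRequest{
		Model:     openai.GPT3Dot5TurboInstruct,
		Prompt:    "Write a haiku about Go.",
		MaxTokens: 64,
		// Store the completion output for use in distillations and evals.
		Store: true,
		// Metadata to store alongside the completion (field added in this diff);
		// the keys and values here are hypothetical.
		Metadata: map[string]string{
			"project": "docs-example",
			"run_id":  "haiku-001",
		},
	}

	resp, err := client.CreateCompletion(context.Background(), req)
	if err != nil {
		fmt.Fprintln(os.Stderr, "completion error:", err)
		return
	}
	fmt.Println(resp.Choices[0].Text)
}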