45 changes: 45 additions & 0 deletions response.go
@@ -0,0 +1,45 @@
package openai

import (
	"context"
	"net/http"
)

const (
	responsesSuffix = "/responses"
)

// CreateResponseRequest is the request body for the Responses API.
type CreateResponseRequest struct {
	Model              string `json:"model"`
	Input              any    `json:"input"`
	Tools              []Tool `json:"tools,omitempty"`
	PreviousResponseID string `json:"previous_response_id,omitempty"`
}

// CreateResponseResponse is the response returned by the Responses API.
type CreateResponseResponse struct {
	ID      string `json:"id"`
	Created int64  `json:"created_at"`
	Error   any    `json:"error,omitempty"`
	Output  []any  `json:"output"`
	Model   string `json:"model"`

	httpHeader
}

// CreateResponse sends a request to the /responses endpoint.
func (c *Client) CreateResponse(
	ctx context.Context,
	request CreateResponseRequest,
) (response CreateResponseResponse, err error) {
	req, err := c.newRequest(
		ctx,
		http.MethodPost,
		c.fullURL(responsesSuffix),
		withBody(request),
	)
	if err != nil {
		return
	}

	err = c.sendRequest(req, &response)
	return
}
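For context, a caller would use the new method roughly like this. This is a minimal sketch, not part of the diff: it assumes an API key in the OPENAI_API_KEY environment variable and mirrors the request shape exercised in the test below.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/sashabaranov/go-openai"
)

func main() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	resp, err := client.CreateResponse(context.Background(), openai.CreateResponseRequest{
		Model: "gpt-4o",
		Input: "What's the latest news about AI?",
		Tools: []openai.Tool{{Type: "web_search"}},
	})
	if err != nil {
		log.Fatalf("CreateResponse error: %v", err)
	}

	// Output is typed as []any in this PR, so items are inspected generically.
	fmt.Println(resp.ID, resp.Model, len(resp.Output))
}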
56 changes: 56 additions & 0 deletions response_test.go
@@ -0,0 +1,56 @@
package openai_test

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"strconv"
	"testing"
	"time"

	"github.com/sashabaranov/go-openai"
	"github.com/sashabaranov/go-openai/internal/test/checks"
)

func TestCreateResponse(t *testing.T) {
	client, server, teardown := setupOpenAITestServer()
	defer teardown()
	server.RegisterHandler("/v1/responses", handleResponseEndpoint)
	_, err := client.CreateResponse(context.Background(), openai.CreateResponseRequest{
		Model: "gpt-4o",
		Input: "What's the latest news about AI?",
		Tools: []openai.Tool{
			{
				Type: "web_search",
			},
		},
	})
	checks.NoError(t, err, "CreateResponse error")
}

func handleResponseEndpoint(w http.ResponseWriter, r *http.Request) {
	var err error
	var resBytes []byte

	if r.Method != "POST" {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}

	var responseReq openai.CreateResponseRequest
	if err = json.NewDecoder(r.Body).Decode(&responseReq); err != nil {
		http.Error(w, "could not read request", http.StatusInternalServerError)
		return
	}

	res := openai.CreateResponseResponse{
		ID:      "resp_" + strconv.Itoa(int(time.Now().Unix())),
		Created: time.Now().Unix(),
		Model:   responseReq.Model,
		Output:  []any{},
	}

	resBytes, _ = json.Marshal(res)
	fmt.Fprintln(w, string(resBytes))
}