// readysite/pkg/assistant/providers/openai/openai_test.go (7.6 KB)
package openai_test

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/readysite/readysite/pkg/assistant"
	"github.com/readysite/readysite/pkg/assistant/providers/openai"
)

// TestNew_EmptyKey verifies that constructing a client with an empty API
// key is rejected with assistant.ErrNoAPIKey.
func TestNew_EmptyKey(t *testing.T) {
	if _, err := openai.New(""); !errors.Is(err, assistant.ErrNoAPIKey) {
		t.Fatalf("expected ErrNoAPIKey, got %v", err)
	}
}

// TestNew_ValidKey verifies that a non-empty API key yields a non-nil
// client and no error.
func TestNew_ValidKey(t *testing.T) {
	ai, err := openai.New("sk-test-key")
	switch {
	case err != nil:
		t.Fatalf("unexpected error: %v", err)
	case ai == nil:
		t.Fatal("expected non-nil assistant")
	}
}

// TestChat verifies a successful chat completion round-trip: the outgoing
// request carries the expected method, path, auth/content-type headers, and
// model, and the response content, finish reason, and usage counts are
// decoded correctly.
func TestChat(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Verify request. The handler runs on its own goroutine, so only
		// t.Error* (never t.Fatal*) may be used here.
		if r.Method != http.MethodPost {
			t.Errorf("expected POST, got %s", r.Method)
		}
		if r.URL.Path != "/chat/completions" {
			t.Errorf("expected /chat/completions, got %s", r.URL.Path)
		}
		if r.Header.Get("Authorization") != "Bearer sk-test-key" {
			t.Errorf("expected Bearer sk-test-key, got %s", r.Header.Get("Authorization"))
		}
		if r.Header.Get("Content-Type") != "application/json" {
			t.Errorf("expected application/json, got %s", r.Header.Get("Content-Type"))
		}

		// Verify request body. Previously both errors here were ignored,
		// which could surface a read/parse failure as a misleading
		// model-mismatch report instead.
		body, err := io.ReadAll(r.Body)
		if err != nil {
			t.Errorf("reading request body: %v", err)
			return
		}
		var req map[string]any
		if err := json.Unmarshal(body, &req); err != nil {
			t.Errorf("unmarshaling request body: %v", err)
			return
		}

		if req["model"] != "gpt-4" {
			t.Errorf("expected model gpt-4, got %v", req["model"])
		}

		w.Header().Set("Content-Type", "application/json")
		fmt.Fprint(w, `{
			"choices": [{
				"message": {
					"content": "Hello from OpenAI!"
				},
				"finish_reason": "stop"
			}],
			"usage": {
				"prompt_tokens": 10,
				"completion_tokens": 20,
				"total_tokens": 30
			}
		}`)
	}))
	defer server.Close()

	ai, err := openai.New("sk-test-key", openai.WithBaseURL(server.URL))
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}

	resp, err := ai.Chat(context.Background(), assistant.ChatRequest{
		Model: "gpt-4",
		Messages: []assistant.Message{
			assistant.NewUserMessage("Hello"),
		},
	})
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}

	if resp.Content != "Hello from OpenAI!" {
		t.Errorf("expected 'Hello from OpenAI!', got %q", resp.Content)
	}
	if resp.FinishReason != "stop" {
		t.Errorf("expected finish reason 'stop', got %q", resp.FinishReason)
	}
	if resp.Usage.PromptTokens != 10 {
		t.Errorf("expected 10 prompt tokens, got %d", resp.Usage.PromptTokens)
	}
	if resp.Usage.CompletionTokens != 20 {
		t.Errorf("expected 20 completion tokens, got %d", resp.Usage.CompletionTokens)
	}
	if resp.Usage.TotalTokens != 30 {
		t.Errorf("expected 30 total tokens, got %d", resp.Usage.TotalTokens)
	}
}

// TestChat_Error verifies that a non-2xx API response is surfaced to the
// caller as an *assistant.APIError carrying the HTTP status code and the
// fields of the error payload, and that assistant.IsAuthError recognizes
// the 401 case.
func TestChat_Error(t *testing.T) {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusUnauthorized)
		fmt.Fprint(w, `{
			"error": {
				"message": "Invalid API key",
				"type": "invalid_request_error",
				"code": "invalid_api_key"
			}
		}`)
	}))
	defer srv.Close()

	client, err := openai.New("sk-bad-key", openai.WithBaseURL(srv.URL))
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}

	_, err = client.Chat(context.Background(), assistant.ChatRequest{
		Model:    "gpt-4",
		Messages: []assistant.Message{assistant.NewUserMessage("Hello")},
	})
	if err == nil {
		t.Fatal("expected error, got nil")
	}

	// The concrete error type and its fields must round-trip the payload.
	var apiErr *assistant.APIError
	if !errors.As(err, &apiErr) {
		t.Fatalf("expected APIError, got %T: %v", err, err)
	}
	if got := apiErr.StatusCode; got != http.StatusUnauthorized {
		t.Errorf("expected status 401, got %d", got)
	}
	if got := apiErr.Type; got != "invalid_request_error" {
		t.Errorf("expected type 'invalid_request_error', got %q", got)
	}
	if got := apiErr.Message; got != "Invalid API key" {
		t.Errorf("expected message 'Invalid API key', got %q", got)
	}

	// Verify IsAuthError works
	if !assistant.IsAuthError(err) {
		t.Error("expected IsAuthError to return true for 401")
	}
}

// TestChat_ToolCalls verifies that tool definitions are serialized into the
// request and that a tool_calls response is decoded into ToolCalls with
// parseable JSON arguments and the matching finish reason.
func TestChat_ToolCalls(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Verify tools were sent in request. Previously the read and
		// unmarshal errors were ignored; the handler runs on its own
		// goroutine, so only t.Error* (never t.Fatal*) may be used here.
		body, err := io.ReadAll(r.Body)
		if err != nil {
			t.Errorf("reading request body: %v", err)
			return
		}
		var req map[string]any
		if err := json.Unmarshal(body, &req); err != nil {
			t.Errorf("unmarshaling request body: %v", err)
			return
		}

		tools, ok := req["tools"].([]any)
		if !ok || len(tools) != 1 {
			t.Errorf("expected 1 tool in request, got %v", req["tools"])
		}

		w.Header().Set("Content-Type", "application/json")
		fmt.Fprint(w, `{
			"choices": [{
				"message": {
					"content": "",
					"tool_calls": [{
						"id": "call_abc123",
						"type": "function",
						"function": {
							"name": "get_weather",
							"arguments": "{\"location\":\"San Francisco\"}"
						}
					}]
				},
				"finish_reason": "tool_calls"
			}],
			"usage": {
				"prompt_tokens": 15,
				"completion_tokens": 25,
				"total_tokens": 40
			}
		}`)
	}))
	defer server.Close()

	ai, err := openai.New("sk-test-key", openai.WithBaseURL(server.URL))
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}

	resp, err := ai.Chat(context.Background(), assistant.ChatRequest{
		Model: "gpt-4",
		Messages: []assistant.Message{
			assistant.NewUserMessage("What is the weather in San Francisco?"),
		},
		Tools: []assistant.Tool{
			assistant.NewTool("get_weather", "Get the current weather").
				String("location", "City name", true).
				Build(),
		},
	})
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}

	if len(resp.ToolCalls) != 1 {
		t.Fatalf("expected 1 tool call, got %d", len(resp.ToolCalls))
	}
	tc := resp.ToolCalls[0]
	if tc.ID != "call_abc123" {
		t.Errorf("expected tool call ID 'call_abc123', got %q", tc.ID)
	}
	if tc.Name != "get_weather" {
		t.Errorf("expected tool name 'get_weather', got %q", tc.Name)
	}

	// Arguments arrive as a JSON string; ParseArguments must decode them.
	var args struct {
		Location string `json:"location"`
	}
	if err := tc.ParseArguments(&args); err != nil {
		t.Fatalf("failed to parse arguments: %v", err)
	}
	if args.Location != "San Francisco" {
		t.Errorf("expected location 'San Francisco', got %q", args.Location)
	}

	if resp.FinishReason != "tool_calls" {
		t.Errorf("expected finish reason 'tool_calls', got %q", resp.FinishReason)
	}
}

// TestStream verifies SSE streaming: the request sets stream=true, and the
// stream's Collect concatenates the delta chunks into the full content and
// surfaces the final finish reason.
func TestStream(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Verify stream=true in request. The handler runs on its own
		// goroutine, so only t.Error* may be used here — t.Fatal* is
		// documented to be invalid outside the test goroutine (it would
		// only stop the handler via runtime.Goexit, not fail fast).
		body, err := io.ReadAll(r.Body)
		if err != nil {
			t.Errorf("reading request body: %v", err)
			return
		}
		var req map[string]any
		if err := json.Unmarshal(body, &req); err != nil {
			t.Errorf("unmarshaling request body: %v", err)
			return
		}

		if req["stream"] != true {
			t.Errorf("expected stream=true, got %v", req["stream"])
		}

		w.Header().Set("Content-Type", "text/event-stream")
		w.Header().Set("Cache-Control", "no-cache")

		flusher, ok := w.(http.Flusher)
		if !ok {
			// Was t.Fatal, which must not be called from this goroutine.
			t.Error("expected ResponseWriter to implement Flusher")
			return
		}

		// Send SSE chunks
		chunks := []string{
			`data: {"choices":[{"delta":{"content":"Hello"},"finish_reason":null}]}`,
			`data: {"choices":[{"delta":{"content":" "},"finish_reason":null}]}`,
			`data: {"choices":[{"delta":{"content":"World"},"finish_reason":null}]}`,
			`data: {"choices":[{"delta":{},"finish_reason":"stop"}]}`,
			`data: [DONE]`,
		}
		for _, chunk := range chunks {
			fmt.Fprintf(w, "%s\n\n", chunk)
			flusher.Flush()
		}
	}))
	defer server.Close()

	ai, err := openai.New("sk-test-key", openai.WithBaseURL(server.URL))
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}

	stream, err := ai.Stream(context.Background(), assistant.ChatRequest{
		Model: "gpt-4",
		Messages: []assistant.Message{
			assistant.NewUserMessage("Hello"),
		},
	})
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	defer stream.Close()

	resp, err := stream.Collect()
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}

	if resp.Content != "Hello World" {
		t.Errorf("expected 'Hello World', got %q", resp.Content)
	}
	if resp.FinishReason != "stop" {
		t.Errorf("expected finish reason 'stop', got %q", resp.FinishReason)
	}
}