Files
s01e02/internal/infrastructure/llm/lmstudio.go
2026-03-12 02:10:57 +01:00

124 lines
3.2 KiB
Go

package llm
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"

	"github.com/paramah/ai_devs4/s01e02/internal/domain"
)
// LMStudioProvider implements domain.LLMProvider for local LM Studio
type LMStudioProvider struct {
	baseURL string       // server base URL; Chat appends "/v1/chat/completions" to it
	model   string       // model identifier sent in every request body
	client  *http.Client // shared HTTP client, reused across requests
}
// NewLMStudioProvider creates a new LM Studio provider that talks to the
// OpenAI-compatible API served at baseURL using the given model name.
func NewLMStudioProvider(baseURL, model string) *LMStudioProvider {
	return &LMStudioProvider{
		baseURL: baseURL,
		model:   model,
		// A client without a Timeout can block forever on a stalled
		// connection. Local LLM inference can be slow, so the limit is
		// generous; per-request deadlines can still be tightened via the
		// ctx passed to Chat.
		client: &http.Client{Timeout: 5 * time.Minute},
	}
}
// lmStudioRequest is the JSON request body for the OpenAI-compatible
// /v1/chat/completions endpoint.
type lmStudioRequest struct {
	Model    string              `json:"model"`
	Messages []domain.LLMMessage `json:"messages"`
	// Tools is omitted entirely when no function definitions are supplied.
	Tools []domain.Tool `json:"tools,omitempty"`
	// ToolChoice is either the string "auto" or an object naming a
	// specific function; see Chat for how it is populated.
	ToolChoice  interface{} `json:"tool_choice,omitempty"`
	Temperature float64     `json:"temperature,omitempty"`
}
// lmStudioResponse mirrors the subset of the OpenAI-compatible
// chat-completions response that this provider consumes.
type lmStudioResponse struct {
	Choices []struct {
		Message      domain.LLMMessage `json:"message"`
		FinishReason string            `json:"finish_reason"`
	} `json:"choices"`
	// Error is kept raw because servers disagree on its shape (plain
	// string vs. object); Chat decodes it lazily.
	Error json.RawMessage `json:"error,omitempty"`
}
// Chat sends a chat request with function calling support.
//
// It POSTs an OpenAI-compatible request to {baseURL}/v1/chat/completions,
// decodes the response into domain types, and surfaces server-reported
// errors. The provided ctx cancels the in-flight HTTP request.
func (p *LMStudioProvider) Chat(ctx context.Context, request domain.LLMRequest) (*domain.LLMResponse, error) {
	reqBody := lmStudioRequest{
		Model:       p.model,
		Messages:    request.Messages,
		Tools:       request.Tools,
		Temperature: request.Temperature,
	}
	// "auto" passes through verbatim; any other non-empty value is taken
	// as a function name and wrapped in the OpenAI tool_choice shape.
	if request.ToolChoice != "" {
		if request.ToolChoice == "auto" {
			reqBody.ToolChoice = "auto"
		} else {
			reqBody.ToolChoice = map[string]interface{}{
				"type": "function",
				"function": map[string]string{
					"name": request.ToolChoice,
				},
			}
		}
	}

	jsonData, err := json.Marshal(reqBody)
	if err != nil {
		return nil, fmt.Errorf("marshaling request: %w", err)
	}

	url := p.baseURL + "/v1/chat/completions"
	// bytes.NewReader is sufficient for a read-only request body.
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(jsonData))
	if err != nil {
		return nil, fmt.Errorf("creating request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := p.client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("sending request: %w", err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("reading response: %w", err)
	}
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(body))
	}

	var apiResp lmStudioResponse
	if err := json.Unmarshal(body, &apiResp); err != nil {
		return nil, fmt.Errorf("unmarshaling response: %w\nResponse body: %s", err, string(body))
	}

	// BUG FIX: some OpenAI-compatible servers emit "error": null on
	// success. That yields RawMessage "null" (len 4), which unmarshals
	// cleanly into an empty string and previously turned a successful
	// response into `API error: `. Treat JSON null like an absent field.
	if len(apiResp.Error) > 0 && string(apiResp.Error) != "null" {
		// The error field may be a plain string, an object with a
		// "message" field, or something else entirely; try each in turn.
		var errStr string
		if err := json.Unmarshal(apiResp.Error, &errStr); err == nil {
			return nil, fmt.Errorf("API error: %s", errStr)
		}
		var errObj struct {
			Message string `json:"message"`
		}
		if err := json.Unmarshal(apiResp.Error, &errObj); err == nil {
			return nil, fmt.Errorf("API error: %s", errObj.Message)
		}
		return nil, fmt.Errorf("API error: %s", string(apiResp.Error))
	}

	if len(apiResp.Choices) == 0 {
		return nil, fmt.Errorf("no choices in response. Response body: %s", string(body))
	}

	return &domain.LLMResponse{
		Message:      apiResp.Choices[0].Message,
		FinishReason: apiResp.Choices[0].FinishReason,
	}, nil
}