Changing to use OpenRouter
This commit is contained in:
@@ -1,86 +0,0 @@
|
||||
package llm
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func (s *llmService) SendGeminiRequest(systemPrompt string, userPrompt string, model string) (string, error) {
|
||||
apiKey := getEnvConfig("GEMINI_API_KEY")
|
||||
if apiKey == "" {
|
||||
return "", errors.New("GEMINI_API_KEY not found in .env or environment")
|
||||
}
|
||||
|
||||
apiBase := "https://generativelanguage.googleapis.com/v1beta"
|
||||
|
||||
url := fmt.Sprintf("%s/models/%s:generateContent?key=%s", strings.TrimRight(apiBase, "/"), model, apiKey)
|
||||
|
||||
reqBody := map[string]interface{}{}
|
||||
if systemPrompt != "" {
|
||||
reqBody["system_instruction"] = map[string]interface{}{
|
||||
"parts": []map[string]string{
|
||||
{"text": systemPrompt},
|
||||
},
|
||||
}
|
||||
}
|
||||
reqBody["contents"] = []map[string]interface{}{
|
||||
{
|
||||
"role": "user",
|
||||
"parts": []map[string]string{
|
||||
{"text": userPrompt},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
jsonBody, err := json.Marshal(reqBody)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
client := &http.Client{Timeout: 120 * time.Second}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
bodyBytes, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return "", fmt.Errorf("Gemini API error status %d: %s", resp.StatusCode, string(bodyBytes))
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Candidates []struct {
|
||||
Content struct {
|
||||
Parts []struct {
|
||||
Text string `json:"text"`
|
||||
} `json:"parts"`
|
||||
} `json:"content"`
|
||||
} `json:"candidates"`
|
||||
}
|
||||
if err := json.Unmarshal(bodyBytes, &result); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if len(result.Candidates) > 0 && len(result.Candidates[0].Content.Parts) > 0 {
|
||||
return result.Candidates[0].Content.Parts[0].Text, nil
|
||||
}
|
||||
|
||||
return "", errors.New("empty response from Gemini API")
|
||||
}
|
||||
@@ -6,8 +6,7 @@ import (
|
||||
|
||||
// Service is the contract for talking to LLM providers.
type Service interface {
	// Send issues a request using provider settings taken from the
	// environment (see the llmService implementation) and returns the
	// model's text reply.
	Send(systemPrompt string, userPrompt string) (string, error)
	// SendOpenAIRequest issues a chat-completions request to the OpenAI
	// API with an explicitly chosen model.
	SendOpenAIRequest(systemPrompt string, userPrompt string, model string) (string, error)
	// SendGeminiRequest issues a generateContent request to the Google
	// Gemini API with an explicitly chosen model.
	SendGeminiRequest(systemPrompt string, userPrompt string, model string) (string, error)
}

// llmService is the stateless default implementation of Service.
type llmService struct{}
|
||||
|
||||
@@ -11,15 +11,23 @@ import (
|
||||
"time"
|
||||
)
|
||||
|
||||
func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, model string) (string, error) {
|
||||
apiKey := getEnvConfig("OPENAI_API_KEY")
|
||||
if apiKey == "" {
|
||||
return "", errors.New("OPENAI_API_KEY not found in .env or environment")
|
||||
func (s *llmService) Send(systemPrompt string, userPrompt string) (string, error) {
|
||||
apiURL := getEnvConfig("OPENAI_API_URL")
|
||||
if apiURL == "" {
|
||||
return "", errors.New("OPENAI_API_URL not found in environment")
|
||||
}
|
||||
|
||||
apiBase := "https://api.openai.com/v1"
|
||||
token := getEnvConfig("OPENAI_TOKEN")
|
||||
if token == "" {
|
||||
return "", errors.New("OPENAI_TOKEN not found in environment")
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/chat/completions", strings.TrimRight(apiBase, "/"))
|
||||
model := getEnvConfig("OPENAI_MODEL")
|
||||
if model == "" {
|
||||
return "", errors.New("OPENAI_MODEL not found in environment")
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/chat/completions", strings.TrimRight(apiURL, "/"))
|
||||
|
||||
reqBody := map[string]interface{}{
|
||||
"model": model,
|
||||
@@ -42,7 +50,7 @@ func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, m
|
||||
return "", err
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", "Bearer "+apiKey)
|
||||
req.Header.Set("Authorization", "Bearer "+token)
|
||||
|
||||
client := &http.Client{Timeout: 120 * time.Second}
|
||||
resp, err := client.Do(req)
|
||||
@@ -62,7 +70,7 @@ func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, m
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
lastErr = fmt.Errorf("OpenAI API error status %d: %s", resp.StatusCode, string(bodyBytes))
|
||||
lastErr = fmt.Errorf("API error status %d: %s", resp.StatusCode, string(bodyBytes))
|
||||
time.Sleep(time.Second * time.Duration(1<<i))
|
||||
continue
|
||||
}
|
||||
@@ -81,8 +89,8 @@ func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, m
|
||||
if len(result.Choices) > 0 {
|
||||
return result.Choices[0].Message.Content, nil
|
||||
}
|
||||
return "", errors.New("empty response from OpenAI API")
|
||||
return "", errors.New("empty response from API")
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("failed to get OpenAI response after 5 attempts. Last error: %v", lastErr)
|
||||
return "", fmt.Errorf("failed to get response after 5 attempts. Last error: %v", lastErr)
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user