diff --git a/mindforge.cronjob/.env.example b/mindforge.cronjob/.env.example index ce61d0f..4367af5 100644 --- a/mindforge.cronjob/.env.example +++ b/mindforge.cronjob/.env.example @@ -1,14 +1,9 @@ GIT_REPOSITORY=https://git.url/user/repo.git -OPENAI_API_KEY=openai_api_key -GEMINI_API_KEY=gemini_api_key DISCORD_WEBHOOK_URL=discord_webhook_channel_url -# LLM provider per agent function ("openai" or "gemini", defaults to "openai") -SUMMARY_CREATOR_PROVIDER=gemini -SUMMARY_FORMATTER_PROVIDER=openai - -# LLM models -GEMINI_MODEL=gemini-3-flash-preview -OPENAI_MODEL=gpt-5-mini +# OpenAI-compatible provider (e.g. OpenRouter) +OPENAI_API_URL=https://openrouter.ai/api/v1 +OPENAI_TOKEN=your_token_here +OPENAI_MODEL=openai/gpt-5.4-mini TOP_N_FILES=10 \ No newline at end of file diff --git a/mindforge.cronjob/deploy/mindforge-cronjob.yaml b/mindforge.cronjob/deploy/mindforge-cronjob.yaml index ab4a6f1..f414df6 100644 --- a/mindforge.cronjob/deploy/mindforge-cronjob.yaml +++ b/mindforge.cronjob/deploy/mindforge-cronjob.yaml @@ -22,16 +22,15 @@ spec: secretKeyRef: name: mindforge-secrets key: GIT_REPOSITORY - - name: GEMINI_API_KEY + - name: OPENAI_TOKEN valueFrom: secretKeyRef: name: mindforge-secrets - key: GEMINI_API_KEY - - name: OPENAI_API_KEY - valueFrom: - secretKeyRef: - name: mindforge-secrets - key: OPENAI_API_KEY + key: OPENAI_TOKEN + - name: OPENAI_API_URL + value: https://openrouter.ai/api/v1 + - name: OPENAI_MODEL + value: openai/gpt-5.4-mini - name: DISCORD_WEBHOOK_URL valueFrom: secretKeyRef: @@ -42,14 +41,6 @@ spec: secretKeyRef: name: mindforge-secrets key: HAVEN_NOTIFY_URL - - name: SUMMARY_CREATOR_PROVIDER - value: gemini - - name: SUMMARY_FORMATTER_PROVIDER - value: openai - - name: GEMINI_MODEL - value: gemini-3-flash-preview - - name: OPENAI_MODEL - value: gpt-5-mini - name: TOP_N_FILES value: "10" - name: LAST_N_DAYS diff --git a/mindforge.cronjob/internal/agent/agent.go b/mindforge.cronjob/internal/agent/agent.go index 1790ae0..c0bc006 100644 --- 
a/mindforge.cronjob/internal/agent/agent.go +++ b/mindforge.cronjob/internal/agent/agent.go @@ -2,50 +2,11 @@ package agent import ( "fmt" - "os" "path/filepath" - "strings" "mindforge.cronjob/internal/llm" ) -// Provider represents the LLM provider to use. -type Provider string - -const ( - ProviderOpenAI Provider = "openai" - ProviderGemini Provider = "gemini" -) - -// providerFromEnv reads the provider for a given agent from an env var, -// defaulting to OpenAI if not set or unrecognised. -func providerFromEnv(envKey string) Provider { - val := strings.ToLower(strings.TrimSpace(os.Getenv(envKey))) - if val == string(ProviderGemini) { - return ProviderGemini - } - return ProviderOpenAI -} - -// send routes the request to the given LLM provider. -func send(provider Provider, systemPrompt, userPrompt string) (string, error) { - llmService := llm.NewLLMService() - switch provider { - case ProviderGemini: - geminiModel := os.Getenv("GEMINI_MODEL") - if geminiModel == "" { - geminiModel = "gemini-3.1-flash-lite-preview" - } - return llmService.SendGeminiRequest(systemPrompt, userPrompt, geminiModel) - default: - openaiModel := os.Getenv("OPENAI_MODEL") - if openaiModel == "" { - openaiModel = "gpt-5-mini" - } - return llmService.SendOpenAIRequest(systemPrompt, userPrompt, openaiModel) - } -} - // SummaryCreatorAgent creates a summary of the git diff for a specific file. func SummaryCreatorAgent(filePath, gitDiff string) (string, error) { fileName := filepath.Base(filePath) @@ -66,7 +27,7 @@ Responda sempre em Português do Brasil (pt-BR).` userPrompt := fmt.Sprintf("Caminho do arquivo: %s\nPasta (Assunto Principal): %s\nArquivo (Assunto Específico): %s\n\nGit Diff:\n%s", filePath, folderName, fileName, gitDiff) - return send(providerFromEnv("SUMMARY_CREATOR_PROVIDER"), systemPrompt, userPrompt) + return llm.NewLLMService().Send(systemPrompt, userPrompt) } // SummaryFormatterAgent formats a plain text summary into Markdown. 
@@ -82,5 +43,5 @@ Regras de formatação: Responda sempre em Português do Brasil (pt-BR).` - return send(providerFromEnv("SUMMARY_FORMATTER_PROVIDER"), systemPrompt, summary) + return llm.NewLLMService().Send(systemPrompt, summary) } diff --git a/mindforge.cronjob/internal/llm/gemini.go b/mindforge.cronjob/internal/llm/gemini.go deleted file mode 100644 index dc640c1..0000000 --- a/mindforge.cronjob/internal/llm/gemini.go +++ /dev/null @@ -1,86 +0,0 @@ -package llm - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "strings" - "time" -) - -func (s *llmService) SendGeminiRequest(systemPrompt string, userPrompt string, model string) (string, error) { - apiKey := getEnvConfig("GEMINI_API_KEY") - if apiKey == "" { - return "", errors.New("GEMINI_API_KEY not found in .env or environment") - } - - apiBase := "https://generativelanguage.googleapis.com/v1beta" - - url := fmt.Sprintf("%s/models/%s:generateContent?key=%s", strings.TrimRight(apiBase, "/"), model, apiKey) - - reqBody := map[string]interface{}{} - if systemPrompt != "" { - reqBody["system_instruction"] = map[string]interface{}{ - "parts": []map[string]string{ - {"text": systemPrompt}, - }, - } - } - reqBody["contents"] = []map[string]interface{}{ - { - "role": "user", - "parts": []map[string]string{ - {"text": userPrompt}, - }, - }, - } - - jsonBody, err := json.Marshal(reqBody) - if err != nil { - return "", err - } - - req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody)) - if err != nil { - return "", err - } - req.Header.Set("Content-Type", "application/json") - - client := &http.Client{Timeout: 120 * time.Second} - resp, err := client.Do(req) - if err != nil { - return "", err - } - defer resp.Body.Close() - - bodyBytes, err := io.ReadAll(resp.Body) - if err != nil { - return "", err - } - - if resp.StatusCode != http.StatusOK { - return "", fmt.Errorf("Gemini API error status %d: %s", resp.StatusCode, string(bodyBytes)) - } - - var result struct { - Candidates 
[]struct { - Content struct { - Parts []struct { - Text string `json:"text"` - } `json:"parts"` - } `json:"content"` - } `json:"candidates"` - } - if err := json.Unmarshal(bodyBytes, &result); err != nil { - return "", err - } - - if len(result.Candidates) > 0 && len(result.Candidates[0].Content.Parts) > 0 { - return result.Candidates[0].Content.Parts[0].Text, nil - } - - return "", errors.New("empty response from Gemini API") -} diff --git a/mindforge.cronjob/internal/llm/llm.go b/mindforge.cronjob/internal/llm/llm.go index 1d37704..f0f88c1 100644 --- a/mindforge.cronjob/internal/llm/llm.go +++ b/mindforge.cronjob/internal/llm/llm.go @@ -6,8 +6,7 @@ import ( // Service defines the interface for connecting to LLMs type Service interface { - SendOpenAIRequest(systemPrompt string, userPrompt string, model string) (string, error) - SendGeminiRequest(systemPrompt string, userPrompt string, model string) (string, error) + Send(systemPrompt string, userPrompt string) (string, error) } type llmService struct{} diff --git a/mindforge.cronjob/internal/llm/openai.go b/mindforge.cronjob/internal/llm/openai.go index 4debd4f..64d37cf 100644 --- a/mindforge.cronjob/internal/llm/openai.go +++ b/mindforge.cronjob/internal/llm/openai.go @@ -11,15 +11,23 @@ import ( "time" ) -func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, model string) (string, error) { - apiKey := getEnvConfig("OPENAI_API_KEY") - if apiKey == "" { - return "", errors.New("OPENAI_API_KEY not found in .env or environment") +func (s *llmService) Send(systemPrompt string, userPrompt string) (string, error) { + apiURL := getEnvConfig("OPENAI_API_URL") + if apiURL == "" { + return "", errors.New("OPENAI_API_URL not found in environment") } - apiBase := "https://api.openai.com/v1" + token := getEnvConfig("OPENAI_TOKEN") + if token == "" { + return "", errors.New("OPENAI_TOKEN not found in environment") + } - url := fmt.Sprintf("%s/chat/completions", strings.TrimRight(apiBase, "/")) + model 
:= getEnvConfig("OPENAI_MODEL") + if model == "" { + return "", errors.New("OPENAI_MODEL not found in environment") + } + + url := fmt.Sprintf("%s/chat/completions", strings.TrimRight(apiURL, "/")) reqBody := map[string]interface{}{ "model": model, @@ -42,7 +50,7 @@ func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, m return "", err } req.Header.Set("Content-Type", "application/json") - req.Header.Set("Authorization", "Bearer "+apiKey) + req.Header.Set("Authorization", "Bearer "+token) client := &http.Client{Timeout: 120 * time.Second} resp, err := client.Do(req) @@ -62,7 +70,7 @@ func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, m } if resp.StatusCode != http.StatusOK { - lastErr = fmt.Errorf("OpenAI API error status %d: %s", resp.StatusCode, string(bodyBytes)) + lastErr = fmt.Errorf("API error status %d: %s", resp.StatusCode, string(bodyBytes)) time.Sleep(time.Second * time.Duration(1<<attempt)) continue } [NOTE(review): unchanged context lines were lost here during text extraction (the `<…>` span was stripped like an HTML tag) — presumably the JSON decode of bodyBytes into `result` — restore from the original patch before applying] if len(result.Choices) > 0 { return result.Choices[0].Message.Content, nil } - return "", errors.New("empty response from OpenAI API") + return "", errors.New("empty response from API") } - return "", fmt.Errorf("failed to get OpenAI response after 5 attempts. Last error: %v", lastErr) + return "", fmt.Errorf("failed to get response after 5 attempts. Last error: %v", lastErr) }