Compare commits

...

2 Commits

Author SHA1 Message Date
b9736293d3 moving to openrouter
All checks were successful
Mindforge API Build and Deploy / Build Mindforge API Image (push) Successful in 3m39s
Mindforge Cronjob Build and Deploy / Build Mindforge Cronjob Image (push) Successful in 3m49s
Mindforge API Build and Deploy / Deploy Mindforge API (internal) (push) Successful in 38s
Mindforge Cronjob Build and Deploy / Deploy Mindforge Cronjob (internal) (push) Successful in 30s
2026-04-04 21:09:18 -03:00
d0543544f8 changing to use openrouter 2026-04-04 21:00:30 -03:00
17 changed files with 89 additions and 505 deletions

View File

@@ -1,8 +0,0 @@
namespace Mindforge.API.Models.Enums
{
/// <summary>
/// Identifies which LLM backend a request is routed to.
/// (This file is deleted by the commit shown here: the code now targets a
/// single OpenAI-compatible endpoint, so per-provider routing was removed.)
/// </summary>
public enum LlmProvider
{
// OpenAI-hosted models.
OpenAI,
// Google Gemini models.
Gemini
}
}

View File

@@ -34,7 +34,6 @@ builder.Services.AddHttpClient();
// Register Providers // Register Providers
builder.Services.AddScoped<ILlmApiProvider, OpenAIApiProvider>(); builder.Services.AddScoped<ILlmApiProvider, OpenAIApiProvider>();
builder.Services.AddScoped<ILlmApiProvider, GeminiApiProvider>();
// Register Services // Register Services
builder.Services.AddScoped<IAgentService, AgentService>(); builder.Services.AddScoped<IAgentService, AgentService>();
@@ -59,30 +58,26 @@ app.UseAuthorization();
app.MapControllers(); app.MapControllers();
// Check for env vars // Check for env vars
var openAiKey = builder.Configuration["OPENAI_API_KEY"]; var openAiApiUrl = builder.Configuration["OPENAI_API_URL"];
var geminiKey = builder.Configuration["GEMINI_API_KEY"]; var openAiToken = builder.Configuration["OPENAI_TOKEN"];
var openAiModel = builder.Configuration["OPENAI_MODEL"];
if (string.IsNullOrEmpty(openAiKey)) if (string.IsNullOrEmpty(openAiApiUrl))
{ app.Logger.LogWarning("OPENAI_API_URL not found in configuration.");
app.Logger.LogWarning("OPENAI_API_KEY not found in configuration.");
}
if (string.IsNullOrEmpty(geminiKey)) if (string.IsNullOrEmpty(openAiToken))
{ app.Logger.LogWarning("OPENAI_TOKEN not found in configuration.");
app.Logger.LogWarning("GEMINI_API_KEY not found in configuration.");
} if (string.IsNullOrEmpty(openAiModel))
app.Logger.LogWarning("OPENAI_MODEL not found in configuration.");
var giteaRepoUrl = builder.Configuration["GITEA_REPO_URL"]; var giteaRepoUrl = builder.Configuration["GITEA_REPO_URL"];
var giteaAccessToken = builder.Configuration["GITEA_ACCESS_TOKEN"]; var giteaAccessToken = builder.Configuration["GITEA_ACCESS_TOKEN"];
if (string.IsNullOrEmpty(giteaRepoUrl)) if (string.IsNullOrEmpty(giteaRepoUrl))
{
app.Logger.LogWarning("GITEA_REPO_URL not found in configuration. Repository features will not work."); app.Logger.LogWarning("GITEA_REPO_URL not found in configuration. Repository features will not work.");
}
if (string.IsNullOrEmpty(giteaAccessToken)) if (string.IsNullOrEmpty(giteaAccessToken))
{
app.Logger.LogWarning("GITEA_ACCESS_TOKEN not found in configuration. Repository features will not work."); app.Logger.LogWarning("GITEA_ACCESS_TOKEN not found in configuration. Repository features will not work.");
}
app.Run(); app.Run();

View File

@@ -1,202 +0,0 @@
using System;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
namespace Mindforge.API.Providers
{
/// <summary>
/// ILlmApiProvider implementation backed by the Google Gemini REST API
/// (v1beta). Supports a direct generateContent call and a single-request
/// inline batch job. (Deleted by this commit in the move to OpenRouter.)
/// </summary>
public class GeminiApiProvider : ILlmApiProvider
{
private readonly HttpClient _httpClient;
private readonly IConfiguration _configuration;
private readonly ILogger<GeminiApiProvider> _logger;
/// <summary>
/// Captures the injected HttpClient, configuration and logger.
/// The HttpClient timeout is raised to 5 minutes because generation
/// calls can be slow; note this mutates the (possibly shared) client.
/// </summary>
public GeminiApiProvider(HttpClient httpClient, IConfiguration configuration, ILogger<GeminiApiProvider> logger)
{
_httpClient = httpClient;
_httpClient.Timeout = TimeSpan.FromMinutes(5);
_configuration = configuration;
_logger = logger;
}
/// <summary>
/// Sends one synchronous generateContent request and returns the text of
/// the first part of the first candidate.
/// Throws <see cref="Exception"/> when GEMINI_API_KEY is missing, on a
/// non-success HTTP status, or when the response carries no candidates.
/// NOTE(review): the API key travels as a URL query parameter, so it can
/// leak into logs/proxies; bare Exception is thrown rather than a more
/// specific type.
/// </summary>
public async Task<string> SendRequestAsync(string systemPrompt, string userPrompt, string model)
{
var apiKey = _configuration["GEMINI_API_KEY"];
if (string.IsNullOrEmpty(apiKey))
{
throw new Exception("GEMINI_API_KEY not found in configuration.");
}
var apiBase = "https://generativelanguage.googleapis.com/v1beta";
var url = $"{apiBase.TrimEnd('/')}/models/{model}:generateContent?key={apiKey}";
// Anonymous-type request body; system_instruction is omitted entirely
// (serialized as null + WhenWritingNull) when no system prompt is given.
var reqBody = new
{
system_instruction = string.IsNullOrEmpty(systemPrompt) ? null : new
{
parts = new[] { new { text = systemPrompt } }
},
contents = new[]
{
new
{
role = "user",
parts = new[] { new { text = userPrompt } }
}
}
};
var jsonBody = JsonSerializer.Serialize(reqBody, new JsonSerializerOptions { DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull });
using var request = new HttpRequestMessage(HttpMethod.Post, url);
request.Content = new StringContent(jsonBody, Encoding.UTF8, "application/json");
var response = await _httpClient.SendAsync(request);
var responseBody = await response.Content.ReadAsStringAsync();
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Gemini API error status {(int)response.StatusCode}: {responseBody}");
}
// Pull candidates[0].content.parts[0].text out of the raw JSON.
var result = JsonSerializer.Deserialize<JsonElement>(responseBody);
if (result.TryGetProperty("candidates", out var candidates) && candidates.GetArrayLength() > 0)
{
var content = candidates[0].GetProperty("content");
if (content.TryGetProperty("parts", out var parts) && parts.GetArrayLength() > 0)
{
return parts[0].GetProperty("text").GetString() ?? string.Empty;
}
}
throw new Exception("empty response from Gemini API");
}
/// <summary>
/// Submits a one-item inline batch job via batchGenerateContent, then
/// polls the job every 10 seconds until it reaches a terminal state and
/// returns the first inlined response's text.
/// NOTE(review): the polling loop has no iteration cap or overall
/// deadline — if the job never reaches a terminal state this method
/// polls forever (the 5-minute HttpClient timeout only bounds each
/// individual poll request).
/// </summary>
public async Task<string> SendRequestBatchAsync(string systemPrompt, string userPrompt, string model)
{
var apiKey = _configuration["GEMINI_API_KEY"];
if (string.IsNullOrEmpty(apiKey))
throw new Exception("GEMINI_API_KEY not found in configuration.");
var apiBase = "https://generativelanguage.googleapis.com/v1beta";
var jsonOptions = new JsonSerializerOptions { DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull };
// Build single inline request
var batchBody = new
{
batch = new
{
display_name = "mindforge-batch",
input_config = new
{
// Doubly-nested "requests.requests" mirrors the Batch API's
// InputConfig schema for inline requests.
requests = new
{
requests = new[]
{
new
{
request = new
{
system_instruction = string.IsNullOrEmpty(systemPrompt) ? null : new
{
parts = new[] { new { text = systemPrompt } }
},
contents = new[]
{
new
{
role = "user",
parts = new[] { new { text = userPrompt } }
}
}
},
// Correlation key for matching responses; unused here
// since only one request is submitted.
metadata = new { key = "request-1" }
}
}
}
}
}
};
// Submit batch job
var createUrl = $"{apiBase}/models/{model}:batchGenerateContent?key={apiKey}";
using var createReq = new HttpRequestMessage(HttpMethod.Post, createUrl);
createReq.Content = new StringContent(JsonSerializer.Serialize(batchBody, jsonOptions), Encoding.UTF8, "application/json");
var createResp = await _httpClient.SendAsync(createReq);
var createBody = await createResp.Content.ReadAsStringAsync();
if (!createResp.IsSuccessStatusCode)
throw new Exception($"Gemini Batch API error creating job {(int)createResp.StatusCode}: {createBody}");
_logger.LogInformation("Gemini Batch API job created");
// The returned "name" is the job resource path used for polling.
var createResult = JsonSerializer.Deserialize<JsonElement>(createBody);
if (!createResult.TryGetProperty("name", out var nameEl))
throw new Exception("Gemini Batch API did not return a job name.");
var batchName = nameEl.GetString()!;
var pollUrl = $"{apiBase}/{batchName}?key={apiKey}";
_logger.LogInformation("Gemini Batch API job name: {BatchName}", batchName);
// Poll until terminal state
while (true)
{
await Task.Delay(TimeSpan.FromSeconds(10));
using var pollReq = new HttpRequestMessage(HttpMethod.Get, pollUrl);
var pollResp = await _httpClient.SendAsync(pollReq);
var pollBody = await pollResp.Content.ReadAsStringAsync();
if (!pollResp.IsSuccessStatusCode)
throw new Exception($"Gemini Batch API error polling status {(int)pollResp.StatusCode}: {pollBody}");
var pollResult = JsonSerializer.Deserialize<JsonElement>(pollBody);
var metadata = pollResult.GetProperty("metadata");
var state = metadata.GetProperty("state");
_logger.LogInformation("Gemini Batch API job state: {State}", state.GetString());
switch (state.GetString())
{
case "BATCH_STATE_SUCCEEDED":
// Walk response.inlinedResponses.inlinedResponses[0] and
// surface either its per-request error or its first text part.
if (pollResult.TryGetProperty("response", out var batchResponse) &&
batchResponse.TryGetProperty("inlinedResponses", out var inlinedResponses) &&
inlinedResponses.TryGetProperty("inlinedResponses", out var inlinedResponsesInternal) &&
inlinedResponsesInternal.GetArrayLength() > 0)
{
_logger.LogInformation("Gemini Batch API job succeeded");
var first = inlinedResponsesInternal[0];
if (first.TryGetProperty("error", out var reqError))
throw new Exception($"Gemini Batch request error: {reqError}");
if (first.TryGetProperty("response", out var innerResponse) &&
innerResponse.TryGetProperty("candidates", out var candidates) &&
candidates.GetArrayLength() > 0)
{
var content = candidates[0].GetProperty("content");
if (content.TryGetProperty("parts", out var parts) && parts.GetArrayLength() > 0)
return parts[0].GetProperty("text").GetString() ?? string.Empty;
}
}
throw new Exception("Gemini Batch job succeeded but returned no content.");
case "BATCH_STATE_FAILED":
throw new Exception($"Gemini Batch job failed: {pollBody}");
case "BATCH_STATE_CANCELLED":
throw new Exception("Gemini Batch job was cancelled.");
case "BATCH_STATE_EXPIRED":
throw new Exception("Gemini Batch job expired before completing.");
// BATCH_STATE_PENDING / BATCH_STATE_RUNNING — keep polling
}
}
}
}
}

View File

@@ -4,7 +4,6 @@ namespace Mindforge.API.Providers
{ {
public interface ILlmApiProvider public interface ILlmApiProvider
{ {
Task<string> SendRequestAsync(string systemPrompt, string userPrompt, string model); Task<string> SendRequestAsync(string systemPrompt, string userPrompt);
Task<string> SendRequestBatchAsync(string systemPrompt, string userPrompt, string model);
} }
} }

View File

@@ -22,28 +22,29 @@ namespace Mindforge.API.Providers
_logger = logger; _logger = logger;
} }
public async Task<string> SendRequestAsync(string systemPrompt, string userPrompt, string model) public async Task<string> SendRequestAsync(string systemPrompt, string userPrompt)
{ {
var apiKey = _configuration["OPENAI_API_KEY"]; var apiUrl = _configuration["OPENAI_API_URL"];
if (string.IsNullOrEmpty(apiKey)) if (string.IsNullOrEmpty(apiUrl))
{ throw new Exception("OPENAI_API_URL not found in configuration.");
throw new Exception("OPENAI_API_KEY not found in configuration.");
}
var apiBase = "https://api.openai.com/v1"; var token = _configuration["OPENAI_TOKEN"];
var url = $"{apiBase.TrimEnd('/')}/responses"; if (string.IsNullOrEmpty(token))
throw new Exception("OPENAI_TOKEN not found in configuration.");
var model = _configuration["OPENAI_MODEL"];
if (string.IsNullOrEmpty(model))
throw new Exception("OPENAI_MODEL not found in configuration.");
var url = $"{apiUrl.TrimEnd('/')}/chat/completions";
var reqBody = new var reqBody = new
{ {
model = model, model = model,
input = new[] messages = new[]
{ {
new { role = "developer", content = systemPrompt }, new { role = "system", content = systemPrompt },
new { role = "user", content = userPrompt } new { role = "user", content = userPrompt }
},
reasoning = new
{
effort = "low"
} }
}; };
@@ -54,7 +55,7 @@ namespace Mindforge.API.Providers
for (int i = 0; i < 5; i++) for (int i = 0; i < 5; i++)
{ {
using var request = new HttpRequestMessage(HttpMethod.Post, url); using var request = new HttpRequestMessage(HttpMethod.Post, url);
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", apiKey); request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
request.Content = new StringContent(jsonBody, Encoding.UTF8, "application/json"); request.Content = new StringContent(jsonBody, Encoding.UTF8, "application/json");
try try
@@ -64,47 +65,30 @@ namespace Mindforge.API.Providers
if (!response.IsSuccessStatusCode) if (!response.IsSuccessStatusCode)
{ {
lastErr = new Exception($"OpenAI API error status {(int)response.StatusCode}: {responseBody}"); lastErr = new Exception($"API error status {(int)response.StatusCode}: {responseBody}");
await Task.Delay(TimeSpan.FromSeconds(1 << i)); await Task.Delay(TimeSpan.FromSeconds(1 << i));
continue; continue;
} }
var result = JsonSerializer.Deserialize<JsonElement>(responseBody); var result = JsonSerializer.Deserialize<JsonElement>(responseBody);
if (result.TryGetProperty("output", out var outputArray)) if (result.TryGetProperty("choices", out var choices) && choices.GetArrayLength() > 0)
{ {
foreach (var outputItem in outputArray.EnumerateArray()) var message = choices[0].GetProperty("message");
{ return message.GetProperty("content").GetString() ?? string.Empty;
if (outputItem.TryGetProperty("content", out var contentArray))
{
foreach (var contentItem in contentArray.EnumerateArray())
{
if (contentItem.TryGetProperty("text", out var textContent))
{
return textContent.GetString() ?? string.Empty;
}
}
}
}
} }
_logger.LogWarning("OpenAI API raw response: {responseBody}", responseBody); _logger.LogWarning("API raw response: {responseBody}", responseBody);
throw new Exception("Empty response from API.");
throw new Exception("empty response from OpenAI API");
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "Error in OpenAI API request"); _logger.LogError(ex, "Error in API request");
lastErr = ex; lastErr = ex;
await Task.Delay(TimeSpan.FromSeconds(1 << i)); await Task.Delay(TimeSpan.FromSeconds(1 << i));
} }
} }
throw new Exception($"failed to get OpenAI response after 5 attempts. Last error: {lastErr?.Message}", lastErr); throw new Exception($"Failed to get response after 5 attempts. Last error: {lastErr?.Message}", lastErr);
}
public async Task<string> SendRequestBatchAsync(string systemPrompt, string userPrompt, string model)
{
throw new NotImplementedException();
} }
} }
} }

View File

@@ -1,8 +1,4 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using Mindforge.API.Models.Enums;
using Mindforge.API.Providers; using Mindforge.API.Providers;
using Mindforge.API.Services.Interfaces; using Mindforge.API.Services.Interfaces;
@@ -10,39 +6,16 @@ namespace Mindforge.API.Services
{ {
public class AgentService : IAgentService public class AgentService : IAgentService
{ {
private readonly IEnumerable<ILlmApiProvider> _providers; private readonly ILlmApiProvider _provider;
public AgentService(IEnumerable<ILlmApiProvider> providers) public AgentService(ILlmApiProvider provider)
{ {
_providers = providers; _provider = provider;
} }
public Task<string> ProcessRequestAsync(LlmProvider providerEnum, string systemPrompt, string userPrompt, string model) public Task<string> ProcessRequestAsync(string systemPrompt, string userPrompt)
{ {
ILlmApiProvider provider = providerEnum switch return _provider.SendRequestAsync(systemPrompt, userPrompt);
{
LlmProvider.OpenAI => _providers.OfType<OpenAIApiProvider>().FirstOrDefault()
?? throw new Exception("OpenAI provider not registered"),
LlmProvider.Gemini => _providers.OfType<GeminiApiProvider>().FirstOrDefault()
?? throw new Exception("Gemini provider not registered"),
_ => throw new Exception("Unknown provider")
};
return provider.SendRequestAsync(systemPrompt, userPrompt, model);
}
public Task<string> ProcessRequestBatchAsync(LlmProvider providerEnum, string systemPrompt, string userPrompt, string model)
{
ILlmApiProvider provider = providerEnum switch
{
LlmProvider.OpenAI => _providers.OfType<OpenAIApiProvider>().FirstOrDefault()
?? throw new Exception("OpenAI provider not registered"),
LlmProvider.Gemini => _providers.OfType<GeminiApiProvider>().FirstOrDefault()
?? throw new Exception("Gemini provider not registered"),
_ => throw new Exception("Unknown provider")
};
return provider.SendRequestBatchAsync(systemPrompt, userPrompt, model);
} }
} }
} }

View File

@@ -1,6 +1,4 @@
using System;
using System.Threading.Tasks; using System.Threading.Tasks;
using Mindforge.API.Models.Enums;
using Mindforge.API.Models.Requests; using Mindforge.API.Models.Requests;
using Mindforge.API.Services.Interfaces; using Mindforge.API.Services.Interfaces;
using Mindforge.API.Exceptions; using Mindforge.API.Exceptions;
@@ -11,9 +9,6 @@ namespace Mindforge.API.Services
{ {
private readonly IAgentService _agentService; private readonly IAgentService _agentService;
private const LlmProvider DefaultProvider = LlmProvider.OpenAI;
private const string DefaultModel = "gpt-5-mini";
public FileService(IAgentService agentService) public FileService(IAgentService agentService)
{ {
_agentService = agentService; _agentService = agentService;
@@ -55,7 +50,7 @@ Os resumos serão utilizados para concursos públicos, principalmente para a ban
string userPrompt = $"Arquivo: {request.FileName}\nConteúdo:\n{request.FileContent}"; string userPrompt = $"Arquivo: {request.FileName}\nConteúdo:\n{request.FileContent}";
return await _agentService.ProcessRequestAsync(DefaultProvider, systemPrompt, userPrompt, DefaultModel); return await _agentService.ProcessRequestAsync(systemPrompt, userPrompt);
} }
} }
} }

View File

@@ -1,5 +1,4 @@
using System.Threading.Tasks; using System.Threading.Tasks;
using Mindforge.API.Models.Enums;
using Mindforge.API.Models.Requests; using Mindforge.API.Models.Requests;
using Mindforge.API.Services.Interfaces; using Mindforge.API.Services.Interfaces;
@@ -10,9 +9,6 @@ namespace Mindforge.API.Services
private readonly IAgentService _agentService; private readonly IAgentService _agentService;
private readonly ILogger<FlashcardService> _logger; private readonly ILogger<FlashcardService> _logger;
private const LlmProvider DefaultProvider = LlmProvider.Gemini;
private string DefaultModel = "gemini-3.1-flash-image-preview";
public FlashcardService(IAgentService agentService, ILogger<FlashcardService> logger) public FlashcardService(IAgentService agentService, ILogger<FlashcardService> logger)
{ {
_agentService = agentService; _agentService = agentService;
@@ -21,25 +17,12 @@ namespace Mindforge.API.Services
public async Task<string> GenerateFlashcardsAsync(FlashcardGenerateRequest request) public async Task<string> GenerateFlashcardsAsync(FlashcardGenerateRequest request)
{ {
var extraPrompt = ""; var extraPrompt = request.Mode switch
switch (request.Mode)
{ {
case FlashcardMode.Basic: FlashcardMode.Detailed => "Crie flashcards mais detalhados.",
DefaultModel = "gemini-3.1-flash-lite-preview"; FlashcardMode.Hyper => "Adicione também pequenas questões para fixação, para que o usuário possa testar seus conhecimentos. As questões devem ser curtas e objetivas, como se fosse cobradas em prova mesmo.",
break; _ => ""
case FlashcardMode.Simple: };
DefaultModel = "gemini-3.1-flash-image-preview";
break;
case FlashcardMode.Detailed:
DefaultModel = "gemini-3.1-flash-image-preview";
extraPrompt = "Crie flashcards mais detalhados.";
break;
case FlashcardMode.Hyper:
DefaultModel = "gemini-3.1-pro-preview";
extraPrompt = "Adicione também pequenas questões para fixação, para que o usuário possa testar seus conhecimentos. As questões devem ser curtas e objetivas, como se fosse cobradas em prova mesmo.";
break;
}
string systemPrompt = $@"Você é um assistente educacional especializado em criar flashcards para o Anki. string systemPrompt = $@"Você é um assistente educacional especializado em criar flashcards para o Anki.
Baseado no texto fornecido, crie exatamente {request.Amount} flashcards que focam nos conceitos mais importantes e difíceis. Baseado no texto fornecido, crie exatamente {request.Amount} flashcards que focam nos conceitos mais importantes e difíceis.
@@ -57,8 +40,7 @@ Com base no arquivo fornecido, crie exatamente {request.Amount} flashcards que f
string userPrompt = $"Arquivo: {request.FileName}\nConteúdo:\n{request.FileContent}"; string userPrompt = $"Arquivo: {request.FileName}\nConteúdo:\n{request.FileContent}";
//var result = await _agentService.ProcessRequestAsync(DefaultProvider, systemPrompt, userPrompt, DefaultModel); var result = await _agentService.ProcessRequestAsync(systemPrompt, userPrompt);
var result = await _agentService.ProcessRequestBatchAsync(DefaultProvider, systemPrompt, userPrompt, DefaultModel);
var lines = result.Split('\n'); var lines = result.Split('\n');

View File

@@ -1,11 +1,9 @@
using System.Threading.Tasks; using System.Threading.Tasks;
using Mindforge.API.Models.Enums;
namespace Mindforge.API.Services.Interfaces namespace Mindforge.API.Services.Interfaces
{ {
public interface IAgentService public interface IAgentService
{ {
Task<string> ProcessRequestAsync(LlmProvider provider, string systemPrompt, string userPrompt, string model); Task<string> ProcessRequestAsync(string systemPrompt, string userPrompt);
Task<string> ProcessRequestBatchAsync(LlmProvider provider, string systemPrompt, string userPrompt, string model);
} }
} }

View File

@@ -6,8 +6,9 @@
} }
}, },
"AllowedHosts": "*", "AllowedHosts": "*",
"OPENAI_API_KEY": "", "OPENAI_API_URL": "https://openrouter.ai/api/v1",
"GEMINI_API_KEY": "", "OPENAI_TOKEN": "<REDACTED — a live OpenRouter API key was committed to appsettings.json here; rotate the key immediately and load it from an environment variable or secret store instead of source control>",
"OPENAI_MODEL": "openai/gpt-5.4-mini",
"GITEA_REPO_URL": "", "GITEA_REPO_URL": "",
"GITEA_ACCESS_TOKEN": "" "GITEA_ACCESS_TOKEN": ""
} }

View File

@@ -20,16 +20,15 @@ spec:
ports: ports:
- containerPort: 8080 - containerPort: 8080
env: env:
- name: OPENAI_API_KEY - name: OPENAI_TOKEN
valueFrom: valueFrom:
secretKeyRef: secretKeyRef:
name: mindforge-secrets name: mindforge-secrets
key: OPENAI_API_KEY key: OPENAI_TOKEN
- name: GEMINI_API_KEY - name: OPENAI_API_URL
valueFrom: value: https://openrouter.ai/api/v1
secretKeyRef: - name: OPENAI_MODEL
name: mindforge-secrets value: openai/gpt-5.4-mini
key: GEMINI_API_KEY
- name: GITEA_REPO_URL - name: GITEA_REPO_URL
valueFrom: valueFrom:
secretKeyRef: secretKeyRef:

View File

@@ -1,14 +1,9 @@
GIT_REPOSITORY=https://git.url/user/repo.git GIT_REPOSITORY=https://git.url/user/repo.git
OPENAI_API_KEY=openai_api_key
GEMINI_API_KEY=gemini_api_key
DISCORD_WEBHOOK_URL=discord_webhook_channel_url DISCORD_WEBHOOK_URL=discord_webhook_channel_url
# LLM provider per agent function ("openai" or "gemini", defaults to "openai") # OpenAI-compatible provider (e.g. OpenRouter)
SUMMARY_CREATOR_PROVIDER=gemini OPENAI_API_URL=https://openrouter.ai/api/v1
SUMMARY_FORMATTER_PROVIDER=openai OPENAI_TOKEN=your_token_here
OPENAI_MODEL=openai/gpt-5.4-mini
# LLM models
GEMINI_MODEL=gemini-3-flash-preview
OPENAI_MODEL=gpt-5-mini
TOP_N_FILES=10 TOP_N_FILES=10

View File

@@ -22,16 +22,15 @@ spec:
secretKeyRef: secretKeyRef:
name: mindforge-secrets name: mindforge-secrets
key: GIT_REPOSITORY key: GIT_REPOSITORY
- name: GEMINI_API_KEY - name: OPENAI_TOKEN
valueFrom: valueFrom:
secretKeyRef: secretKeyRef:
name: mindforge-secrets name: mindforge-secrets
key: GEMINI_API_KEY key: OPENAI_TOKEN
- name: OPENAI_API_KEY - name: OPENAI_API_URL
valueFrom: value: https://openrouter.ai/api/v1
secretKeyRef: - name: OPENAI_MODEL
name: mindforge-secrets value: openai/gpt-5.4-mini
key: OPENAI_API_KEY
- name: DISCORD_WEBHOOK_URL - name: DISCORD_WEBHOOK_URL
valueFrom: valueFrom:
secretKeyRef: secretKeyRef:
@@ -42,14 +41,6 @@ spec:
secretKeyRef: secretKeyRef:
name: mindforge-secrets name: mindforge-secrets
key: HAVEN_NOTIFY_URL key: HAVEN_NOTIFY_URL
- name: SUMMARY_CREATOR_PROVIDER
value: gemini
- name: SUMMARY_FORMATTER_PROVIDER
value: openai
- name: GEMINI_MODEL
value: gemini-3-flash-preview
- name: OPENAI_MODEL
value: gpt-5-mini
- name: TOP_N_FILES - name: TOP_N_FILES
value: "10" value: "10"
- name: LAST_N_DAYS - name: LAST_N_DAYS

View File

@@ -2,50 +2,11 @@ package agent
import ( import (
"fmt" "fmt"
"os"
"path/filepath" "path/filepath"
"strings"
"mindforge.cronjob/internal/llm" "mindforge.cronjob/internal/llm"
) )
// Provider represents the LLM provider to use.
type Provider string
const (
ProviderOpenAI Provider = "openai"
ProviderGemini Provider = "gemini"
)
// providerFromEnv reads the provider for a given agent from an env var,
// defaulting to OpenAI if not set or unrecognised.
func providerFromEnv(envKey string) Provider {
val := strings.ToLower(strings.TrimSpace(os.Getenv(envKey)))
if val == string(ProviderGemini) {
return ProviderGemini
}
return ProviderOpenAI
}
// send routes the request to the given LLM provider.
func send(provider Provider, systemPrompt, userPrompt string) (string, error) {
llmService := llm.NewLLMService()
switch provider {
case ProviderGemini:
geminiModel := os.Getenv("GEMINI_MODEL")
if geminiModel == "" {
geminiModel = "gemini-3.1-flash-lite-preview"
}
return llmService.SendGeminiRequest(systemPrompt, userPrompt, geminiModel)
default:
openaiModel := os.Getenv("OPENAI_MODEL")
if openaiModel == "" {
openaiModel = "gpt-5-mini"
}
return llmService.SendOpenAIRequest(systemPrompt, userPrompt, openaiModel)
}
}
// SummaryCreatorAgent creates a summary of the git diff for a specific file. // SummaryCreatorAgent creates a summary of the git diff for a specific file.
func SummaryCreatorAgent(filePath, gitDiff string) (string, error) { func SummaryCreatorAgent(filePath, gitDiff string) (string, error) {
fileName := filepath.Base(filePath) fileName := filepath.Base(filePath)
@@ -66,7 +27,7 @@ Responda sempre em Português do Brasil (pt-BR).`
userPrompt := fmt.Sprintf("Caminho do arquivo: %s\nPasta (Assunto Principal): %s\nArquivo (Assunto Específico): %s\n\nGit Diff:\n%s", filePath, folderName, fileName, gitDiff) userPrompt := fmt.Sprintf("Caminho do arquivo: %s\nPasta (Assunto Principal): %s\nArquivo (Assunto Específico): %s\n\nGit Diff:\n%s", filePath, folderName, fileName, gitDiff)
return send(providerFromEnv("SUMMARY_CREATOR_PROVIDER"), systemPrompt, userPrompt) return llm.NewLLMService().Send(systemPrompt, userPrompt)
} }
// SummaryFormatterAgent formats a plain text summary into Markdown. // SummaryFormatterAgent formats a plain text summary into Markdown.
@@ -82,5 +43,5 @@ Regras de formatação:
Responda sempre em Português do Brasil (pt-BR).` Responda sempre em Português do Brasil (pt-BR).`
return send(providerFromEnv("SUMMARY_FORMATTER_PROVIDER"), systemPrompt, summary) return llm.NewLLMService().Send(systemPrompt, summary)
} }

View File

@@ -1,86 +0,0 @@
package llm
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"strings"
"time"
)
// SendGeminiRequest performs a single generateContent call against the Google
// Gemini REST API (v1beta) and returns the text of the first part of the first
// candidate. The API key is read from GEMINI_API_KEY via getEnvConfig; an
// empty systemPrompt omits system_instruction from the request body.
// NOTE(review): the key is passed as a URL query parameter, so it can leak
// into logs/proxies. (This file is deleted by the commit shown here.)
func (s *llmService) SendGeminiRequest(systemPrompt string, userPrompt string, model string) (string, error) {
	apiKey := getEnvConfig("GEMINI_API_KEY")
	if apiKey == "" {
		return "", errors.New("GEMINI_API_KEY not found in .env or environment")
	}
	apiBase := "https://generativelanguage.googleapis.com/v1beta"
	url := fmt.Sprintf("%s/models/%s:generateContent?key=%s", strings.TrimRight(apiBase, "/"), model, apiKey)
	// Build the request body as nested maps; system_instruction is added
	// only when a system prompt was supplied.
	reqBody := map[string]interface{}{}
	if systemPrompt != "" {
		reqBody["system_instruction"] = map[string]interface{}{
			"parts": []map[string]string{
				{"text": systemPrompt},
			},
		}
	}
	reqBody["contents"] = []map[string]interface{}{
		{
			"role": "user",
			"parts": []map[string]string{
				{"text": userPrompt},
			},
		},
	}
	jsonBody, err := json.Marshal(reqBody)
	if err != nil {
		return "", err
	}
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonBody))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	// Per-call client with a 2-minute cap; no retries (unlike the OpenAI path).
	client := &http.Client{Timeout: 120 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	bodyBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("Gemini API error status %d: %s", resp.StatusCode, string(bodyBytes))
	}
	// Decode only the fields needed: candidates[0].content.parts[0].text.
	var result struct {
		Candidates []struct {
			Content struct {
				Parts []struct {
					Text string `json:"text"`
				} `json:"parts"`
			} `json:"content"`
		} `json:"candidates"`
	}
	if err := json.Unmarshal(bodyBytes, &result); err != nil {
		return "", err
	}
	if len(result.Candidates) > 0 && len(result.Candidates[0].Content.Parts) > 0 {
		return result.Candidates[0].Content.Parts[0].Text, nil
	}
	return "", errors.New("empty response from Gemini API")
}

View File

@@ -6,8 +6,7 @@ import (
// Service defines the interface for connecting to LLMs // Service defines the interface for connecting to LLMs
type Service interface { type Service interface {
SendOpenAIRequest(systemPrompt string, userPrompt string, model string) (string, error) Send(systemPrompt string, userPrompt string) (string, error)
SendGeminiRequest(systemPrompt string, userPrompt string, model string) (string, error)
} }
type llmService struct{} type llmService struct{}

View File

@@ -11,15 +11,23 @@ import (
"time" "time"
) )
func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, model string) (string, error) { func (s *llmService) Send(systemPrompt string, userPrompt string) (string, error) {
apiKey := getEnvConfig("OPENAI_API_KEY") apiURL := getEnvConfig("OPENAI_API_URL")
if apiKey == "" { if apiURL == "" {
return "", errors.New("OPENAI_API_KEY not found in .env or environment") return "", errors.New("OPENAI_API_URL not found in environment")
} }
apiBase := "https://api.openai.com/v1" token := getEnvConfig("OPENAI_TOKEN")
if token == "" {
return "", errors.New("OPENAI_TOKEN not found in environment")
}
url := fmt.Sprintf("%s/chat/completions", strings.TrimRight(apiBase, "/")) model := getEnvConfig("OPENAI_MODEL")
if model == "" {
return "", errors.New("OPENAI_MODEL not found in environment")
}
url := fmt.Sprintf("%s/chat/completions", strings.TrimRight(apiURL, "/"))
reqBody := map[string]interface{}{ reqBody := map[string]interface{}{
"model": model, "model": model,
@@ -42,7 +50,7 @@ func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, m
return "", err return "", err
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", "Bearer "+apiKey) req.Header.Set("Authorization", "Bearer "+token)
client := &http.Client{Timeout: 120 * time.Second} client := &http.Client{Timeout: 120 * time.Second}
resp, err := client.Do(req) resp, err := client.Do(req)
@@ -62,7 +70,7 @@ func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, m
} }
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
lastErr = fmt.Errorf("OpenAI API error status %d: %s", resp.StatusCode, string(bodyBytes)) lastErr = fmt.Errorf("API error status %d: %s", resp.StatusCode, string(bodyBytes))
time.Sleep(time.Second * time.Duration(1<<i)) time.Sleep(time.Second * time.Duration(1<<i))
continue continue
} }
@@ -81,8 +89,8 @@ func (s *llmService) SendOpenAIRequest(systemPrompt string, userPrompt string, m
if len(result.Choices) > 0 { if len(result.Choices) > 0 {
return result.Choices[0].Message.Content, nil return result.Choices[0].Message.Content, nil
} }
return "", errors.New("empty response from OpenAI API") return "", errors.New("empty response from API")
} }
return "", fmt.Errorf("failed to get OpenAI response after 5 attempts. Last error: %v", lastErr) return "", fmt.Errorf("failed to get response after 5 attempts. Last error: %v", lastErr)
} }