Skip to content

Commit 33230bb

Browse files
Merge pull request #1 from EvolutionAPI/develop
feat(api_key): list provider models dynamically
2 parents 0dd157c + 746adc2 commit 33230bb

2 files changed

Lines changed: 303 additions & 0 deletions

File tree

pkg/api_key/handler/api_key_handler.go

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ type ApiKeyHandler interface {
2424
List(c *gin.Context)
2525
Update(c *gin.Context)
2626
Delete(c *gin.Context)
27+
GetModels(c *gin.Context)
2728
}
2829

2930
// apiKeyHandler implements the ApiKeyHandler interface
@@ -59,6 +60,9 @@ func (h *apiKeyHandler) RegisterRoutesMiddleware(router gin.IRouter) {
5960
apiKeys.GET("/:id",
6061
permissionMiddleware.RequirePermission("ai_api_keys", "read"),
6162
h.GetByID)
63+
apiKeys.GET("/:id/models",
64+
permissionMiddleware.RequirePermission("ai_api_keys", "read"),
65+
h.GetModels)
6266

6367
// Create permissions
6468
apiKeys.POST("",
@@ -80,6 +84,20 @@ func (h *apiKeyHandler) RegisterRoutesMiddleware(router gin.IRouter) {
8084
}
8185
}
8286

87+
func (h *apiKeyHandler) decryptKey(encrypted string) (string, error) {
88+
fernetKey, err := fernet.DecodeKey(h.encryptionKey)
89+
if err != nil {
90+
return "", fmt.Errorf("invalid encryption key: %w", err)
91+
}
92+
93+
plain := fernet.VerifyAndDecrypt([]byte(encrypted), 0, []*fernet.Key{fernetKey})
94+
if plain == nil {
95+
return "", fmt.Errorf("failed to decrypt api key")
96+
}
97+
98+
return string(plain), nil
99+
}
100+
83101
func (h *apiKeyHandler) encryptKey(key string) (string, error) {
84102
// Use Fernet encryption with a fixed key from environment
85103
// This key is shared with evo-ai-processor for decryption
@@ -256,6 +274,55 @@ func (h *apiKeyHandler) Update(c *gin.Context) {
256274
response.SuccessResponse(c, updatedApiKey.ToResponse(), "API key updated successfully", http.StatusOK)
257275
}
258276

277+
// GetModels returns the list of models available for the provider associated
278+
// with the given API key, by calling the provider's models endpoint with the
279+
// decrypted key. The caller passes only the key ID — the plaintext key never
280+
// leaves the server.
281+
func (h *apiKeyHandler) GetModels(c *gin.Context) {
282+
id, err := uuid.Parse(c.Param("id"))
283+
if err != nil {
284+
code, message, httpCode := errors.HandleError(err)
285+
response.ErrorResponse(c, code, message, nil, httpCode)
286+
return
287+
}
288+
289+
apiKey, err := h.apiKeyService.GetByID(c.Request.Context(), id)
290+
if err != nil {
291+
code, message, httpCode := errors.HandleError(err)
292+
response.ErrorResponse(c, code, message, nil, httpCode)
293+
return
294+
}
295+
296+
if !service.ProviderSupportsDynamicModels(apiKey.Provider) {
297+
response.SuccessResponse(c, gin.H{
298+
"provider": apiKey.Provider,
299+
"supported": false,
300+
"models": []service.ModelInfo{},
301+
}, "Provider does not support dynamic model listing", http.StatusOK)
302+
return
303+
}
304+
305+
plainKey, err := h.decryptKey(apiKey.Key)
306+
if err != nil {
307+
code, message, httpCode := errors.HandleError(err)
308+
response.ErrorResponse(c, code, message, nil, httpCode)
309+
return
310+
}
311+
312+
models, err := service.FetchProviderModels(c.Request.Context(), apiKey.Provider, plainKey)
313+
if err != nil {
314+
code, message, httpCode := errors.HandleError(fmt.Errorf("failed to fetch models from provider: %w", err))
315+
response.ErrorResponse(c, code, message, nil, httpCode)
316+
return
317+
}
318+
319+
response.SuccessResponse(c, gin.H{
320+
"provider": apiKey.Provider,
321+
"supported": true,
322+
"models": models,
323+
}, "Models retrieved successfully", http.StatusOK)
324+
}
325+
259326
// Delete handles the delete api key request
260327
func (h *apiKeyHandler) Delete(c *gin.Context) {
261328
id, err := uuid.Parse(c.Param("id"))
Lines changed: 236 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,236 @@
1+
package service
2+
3+
import (
4+
"context"
5+
"fmt"
6+
"sort"
7+
"strings"
8+
9+
"evo-ai-core-service/internal/httpclient"
10+
)
11+
12+
// ModelInfo is the normalized shape the frontend ModelSelector consumes.
type ModelInfo struct {
	Value    string `json:"value"`    // provider-prefixed model ID, e.g. "openai/gpt-4o"
	Label    string `json:"label"`    // human-readable name; fetchers fall back to the raw ID when the provider gives none
	Provider string `json:"provider"` // provider slug the model belongs to
}
18+
19+
// ProviderSupportsDynamicModels reports whether the provider has an HTTP API
// we can query to list its models. Providers that require vendor SDKs
// (Bedrock, Vertex AI) or have no public listing endpoint (Perplexity) fall
// back to the frontend's hardcoded list.
func ProviderSupportsDynamicModels(provider string) bool {
	switch provider {
	case "openai",
		"anthropic",
		"gemini",
		"openrouter",
		"deepseek",
		"together_ai",
		"fireworks_ai":
		return true
	default:
		return false
	}
}
30+
31+
// FetchProviderModels calls the provider's models endpoint using the caller's
32+
// key and returns a normalized list sorted by label. The returned slice is
33+
// never nil — an empty slice with a nil error means the provider responded
34+
// but had nothing to offer.
35+
func FetchProviderModels(ctx context.Context, provider, apiKeyPlain string) ([]ModelInfo, error) {
36+
var (
37+
models []ModelInfo
38+
err error
39+
)
40+
switch provider {
41+
case "openai":
42+
models, err = fetchOpenAICompatible(ctx, "https://api.openai.com/v1/models", apiKeyPlain, provider)
43+
case "deepseek":
44+
models, err = fetchOpenAICompatible(ctx, "https://api.deepseek.com/models", apiKeyPlain, provider)
45+
case "together_ai":
46+
models, err = fetchOpenAICompatible(ctx, "https://api.together.xyz/v1/models", apiKeyPlain, provider)
47+
case "fireworks_ai":
48+
models, err = fetchOpenAICompatible(ctx, "https://api.fireworks.ai/inference/v1/models", apiKeyPlain, provider)
49+
case "openrouter":
50+
models, err = fetchOpenAICompatible(ctx, "https://openrouter.ai/api/v1/models", apiKeyPlain, provider)
51+
case "anthropic":
52+
models, err = fetchAnthropic(ctx, apiKeyPlain)
53+
case "gemini":
54+
models, err = fetchGemini(ctx, apiKeyPlain)
55+
default:
56+
return nil, fmt.Errorf("dynamic model listing not supported for provider %q", provider)
57+
}
58+
if err != nil {
59+
return nil, err
60+
}
61+
sort.Slice(models, func(i, j int) bool { return models[i].Label < models[j].Label })
62+
if models == nil {
63+
models = []ModelInfo{}
64+
}
65+
return models, nil
66+
}
67+
68+
// openAIListResponse covers the common `{ data: [{ id: "..." }] }` shape used
// by OpenAI and every provider that mimics its API (DeepSeek, Together,
// Fireworks, OpenRouter — which adds an optional `name`).
type openAIListResponse struct {
	Data []struct {
		ID   string `json:"id"`   // model identifier; required, entries without it are skipped
		Name string `json:"name"` // optional display name (OpenRouter); empty for most providers
	} `json:"data"`
}
77+
78+
func fetchOpenAICompatible(ctx context.Context, url, apiKey, provider string) ([]ModelInfo, error) {
79+
headers := map[string]string{
80+
"Authorization": "Bearer " + apiKey,
81+
"Accept": "application/json",
82+
}
83+
resp, err := httpclient.DoGetJSON[openAIListResponse](ctx, url, headers, 200)
84+
if err != nil {
85+
return nil, err
86+
}
87+
out := make([]ModelInfo, 0, len(resp.Data))
88+
for _, m := range resp.Data {
89+
if m.ID == "" || !isChatCapableID(m.ID) {
90+
continue
91+
}
92+
label := m.Name
93+
if label == "" {
94+
label = m.ID
95+
}
96+
out = append(out, ModelInfo{
97+
Value: provider + "/" + m.ID,
98+
Label: label,
99+
Provider: provider,
100+
})
101+
}
102+
return out, nil
103+
}
104+
105+
// isChatCapableID returns true when the model ID looks like a general-purpose
// chat/completion model suitable for driving an agent. The `/v1/models`
// endpoint on OpenAI-compatible APIs returns every model the account can
// touch — embeddings, transcription, TTS, image generation, moderation,
// old fine-tunes — and none of those belong in the agent model picker.
//
// Filter is intentionally permissive: it accepts known chat families and
// drops anything that clearly belongs to another modality. When a provider
// ships a new chat family we don't recognize yet, the user can still type it
// in via the "Custom Model" input.
func isChatCapableID(id string) bool {
	lower := strings.ToLower(id)

	// Drop fine-tunes and org-scoped custom models. Legacy OpenAI fine-tunes
	// look like "davinci:ft-org-2022-..."; modern ones use the "ft:" prefix,
	// e.g. "ft:gpt-4o-mini-2024-07-18:org::id". All checks run on the
	// lowercased ID so casing can't let a fine-tune slip through.
	if strings.HasPrefix(lower, "ft:") || strings.Contains(lower, ":ft-") || strings.Contains(lower, ":ft:") {
		return false
	}

	// Drop known non-chat modalities by substring match.
	nonChat := []string{
		"embedding", "embed-",
		"whisper", "tts", "audio", "transcribe", "realtime", "voice",
		"dall-e", "image", "imagen", "sora", "video",
		"moderation",
		"computer-use",
		"search-preview", "deep-research",
	}
	for _, kw := range nonChat {
		if strings.Contains(lower, kw) {
			return false
		}
	}

	// Drop OpenAI's legacy completion-only families and instruct variants.
	if strings.Contains(lower, "instruct") {
		return false
	}
	legacyPrefixes := []string{"babbage", "davinci", "curie", "ada-", "text-ada", "text-babbage", "text-curie", "text-davinci"}
	for _, p := range legacyPrefixes {
		if strings.HasPrefix(lower, p) {
			return false
		}
	}

	// Accept known chat families. If a provider's naming doesn't match any of
	// these, we still accept it so new families aren't silently hidden —
	// the non-chat keywords above already carry most of the filtering.
	return true
}
154+
155+
// anthropicListResponse mirrors the body of Anthropic's GET /v1/models:
// `{ data: [{ id, display_name, type }] }`.
type anthropicListResponse struct {
	Data []struct {
		ID          string `json:"id"`           // model identifier; entries without it are skipped
		DisplayName string `json:"display_name"` // human-readable name; fetcher falls back to ID when empty
		Type        string `json:"type"`         // decoded but currently unused by fetchAnthropic
	} `json:"data"`
}
162+
163+
func fetchAnthropic(ctx context.Context, apiKey string) ([]ModelInfo, error) {
164+
headers := map[string]string{
165+
"x-api-key": apiKey,
166+
"anthropic-version": "2023-06-01",
167+
"Accept": "application/json",
168+
}
169+
resp, err := httpclient.DoGetJSON[anthropicListResponse](ctx, "https://api.anthropic.com/v1/models?limit=1000", headers, 200)
170+
if err != nil {
171+
return nil, err
172+
}
173+
out := make([]ModelInfo, 0, len(resp.Data))
174+
for _, m := range resp.Data {
175+
if m.ID == "" {
176+
continue
177+
}
178+
label := m.DisplayName
179+
if label == "" {
180+
label = m.ID
181+
}
182+
out = append(out, ModelInfo{
183+
Value: "anthropic/" + m.ID,
184+
Label: label,
185+
Provider: "anthropic",
186+
})
187+
}
188+
return out, nil
189+
}
190+
191+
// geminiListResponse mirrors the body of the Generative Language API's
// models list: `{ models: [{ name, displayName, supportedGenerationMethods }] }`.
type geminiListResponse struct {
	Models []struct {
		Name                       string   `json:"name"`                       // fully-qualified name, e.g. "models/gemini-1.5-pro"
		DisplayName                string   `json:"displayName"`                // human-readable name; fetcher falls back to the stripped ID
		SupportedGenerationMethods []string `json:"supportedGenerationMethods"` // must include "generateContent" for chat use
	} `json:"models"`
}
198+
199+
func fetchGemini(ctx context.Context, apiKey string) ([]ModelInfo, error) {
200+
url := "https://generativelanguage.googleapis.com/v1beta/models?key=" + apiKey + "&pageSize=1000"
201+
resp, err := httpclient.DoGetJSON[geminiListResponse](ctx, url, map[string]string{"Accept": "application/json"}, 200)
202+
if err != nil {
203+
return nil, err
204+
}
205+
out := make([]ModelInfo, 0, len(resp.Models))
206+
for _, m := range resp.Models {
207+
// Gemini returns names like "models/gemini-1.5-pro" — strip the prefix.
208+
id := strings.TrimPrefix(m.Name, "models/")
209+
if id == "" {
210+
continue
211+
}
212+
// Only include models that can actually be used for chat.
213+
if !supportsGenerateContent(m.SupportedGenerationMethods) {
214+
continue
215+
}
216+
label := m.DisplayName
217+
if label == "" {
218+
label = id
219+
}
220+
out = append(out, ModelInfo{
221+
Value: "gemini/" + id,
222+
Label: label,
223+
Provider: "gemini",
224+
})
225+
}
226+
return out, nil
227+
}
228+
229+
// supportsGenerateContent reports whether "generateContent" appears among a
// Gemini model's supported generation methods — i.e. whether the model can
// be driven as a chat/completion model.
func supportsGenerateContent(methods []string) bool {
	for idx := 0; idx < len(methods); idx++ {
		if methods[idx] == "generateContent" {
			return true
		}
	}
	return false
}

0 commit comments

Comments
 (0)