Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 8 additions & 1 deletion gateway/it/db_helpers.go
Original file line number Diff line number Diff line change
Expand Up @@ -186,12 +186,19 @@ func GetStoredRestAPISourceConfiguration(ctx context.Context, handle string) (st
// upserts synchronously on the request path, but in CI we occasionally see the
// row not visible to a separate sqlite3 process for a few hundred ms.
func GetStoredRestAPISourceConfigurationWithRetry(ctx context.Context, handle string) (string, error) {
	// Delegate to the generalised helper, pinning the RestApi artifact kind
	// and its backing storage table.
	const (
		restAPIKind  = "RestApi"
		restAPITable = "rest_apis"
	)
	return GetStoredSourceConfigurationWithRetry(ctx, restAPIKind, restAPITable, handle)
}

// GetStoredSourceConfigurationWithRetry generalises GetStoredRestAPISourceConfigurationWithRetry
// to any artifact kind/table pair so template-rendering ITs can assert DB
// persistence for LlmProvider, LlmProxy, and Mcp in addition to RestApi.
func GetStoredSourceConfigurationWithRetry(ctx context.Context, kind, table, handle string) (string, error) {
const maxAttempts = 10
const interval = 200 * time.Millisecond

var lastErr error
for attempt := 0; attempt < maxAttempts; attempt++ {
row, err := GetStoredRestAPISourceConfiguration(ctx, handle)
row, err := queryStoredConfiguration(ctx, kind, table, handle)
if err == nil {
return row, nil
}
Expand Down
206 changes: 206 additions & 0 deletions gateway/it/features/template-functions.feature
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,212 @@ Feature: Template functions in RestApi spec
When I delete the API "tpl-default-api-v1.0"
Then the response should be successful

  # Verifies the template-rendering contract for LlmProvider: the {{ secret }}
  # expression must stay unrendered in API responses and in the persisted DB
  # row, while the upstream request carries the resolved secret value.
  Scenario: secret template in LlmProvider upstream auth value is rendered upstream but unrendered in response and DB
    When I create a secret named "tpl-llm-provider-token" with value "llm-prov-secret-789"
    Then the response status should be 201

    # NOTE(review): steps mix "response status" and "response status code"
    # phrasing — confirm both step definitions are registered.
    Given I authenticate using basic auth as "admin"
    When I create this LLM provider:
      """
      apiVersion: gateway.api-platform.wso2.com/v1alpha1
      kind: LlmProvider
      metadata:
        name: tpl-llm-provider
      spec:
        displayName: Tpl-Llm-Provider
        version: v1.0
        template: openai
        context: /tpl-llm-provider
        upstream:
          url: http://echo-backend-multi-arch:8080/anything
          auth:
            type: api-key
            header: Authorization
            value: 'Bearer {{ secret "tpl-llm-provider-token" }}'
        accessControl:
          mode: allow_all
      """
    Then the response status code should be 201
    # Create response must echo the unrendered template body.
    And the response body should contain template literal:
      """
      {{ secret "tpl-llm-provider-token" }}
      """

    # GET response must echo the unrendered template body
    Given I authenticate using basic auth as "admin"
    When I retrieve the LLM provider "tpl-llm-provider"
    Then the response status code should be 200
    And the response body should contain template literal:
      """
      {{ secret "tpl-llm-provider-token" }}
      """

    # DB must persist the unrendered template body
    And the stored LlmProvider configuration for "tpl-llm-provider" should contain:
      """
      {{ secret "tpl-llm-provider-token" }}
      """

    # Runtime: upstream must receive the resolved Authorization header value
    And I wait for the endpoint "http://localhost:8080/tpl-llm-provider/chat/completions" to be ready with method "POST" and body '{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}'
    When I set header "Content-Type" to "application/json"
    And I send a POST request to "http://localhost:8080/tpl-llm-provider/chat/completions" with body:
      """
      {
        "model": "gpt-4",
        "messages": [{"role": "user", "content": "Hello"}]
      }
      """
    Then the response status code should be 200
    And the response should contain echoed header "Authorization" with value "Bearer llm-prov-secret-789"

    # Cleanup
    Given I authenticate using basic auth as "admin"
    When I delete the LLM provider "tpl-llm-provider"
    Then the response status code should be 200
    When I delete the secret "tpl-llm-provider-token"
    Then the response status should be 200

  # Verifies the template-rendering contract for LlmProxy: a {{ secret }}
  # expression inside a set-headers policy stays unrendered in responses and
  # in the DB, while the upstream request carries the resolved header value.
  # NOTE(review): the YAML nesting below (in particular whether "params" sits
  # at policy level or under each path entry) was reconstructed from a
  # flattened paste — confirm against the LlmProxy schema.
  Scenario: secret template in LlmProxy set-headers policy is rendered upstream but unrendered in response and DB
    When I create a secret named "tpl-llm-proxy-token" with value "llm-proxy-secret-456"
    Then the response status should be 201

    # Plain (un-templated) provider used as the proxy upstream
    Given I authenticate using basic auth as "admin"
    When I create this LLM provider:
      """
      apiVersion: gateway.api-platform.wso2.com/v1alpha1
      kind: LlmProvider
      metadata:
        name: tpl-llm-proxy-provider
      spec:
        displayName: Tpl-Llm-Proxy-Provider
        version: v1.0
        template: openai
        upstream:
          url: http://echo-backend-multi-arch:8080/anything
        accessControl:
          mode: allow_all
      """
    Then the response status code should be 201

    Given I authenticate using basic auth as "admin"
    When I deploy this LLM proxy configuration:
      """
      apiVersion: gateway.api-platform.wso2.com/v1alpha1
      kind: LlmProxy
      metadata:
        name: tpl-llm-proxy
      spec:
        displayName: Tpl-Llm-Proxy
        version: v1.0
        context: /tpl-llm-proxy
        provider:
          id: tpl-llm-proxy-provider
        policies:
          - name: set-headers
            version: v1
            paths:
              - path: /chat/completions
                methods: [POST]
            params:
              request:
                headers:
                  - name: X-Auth-Token
                    value: 'Bearer {{ secret "tpl-llm-proxy-token" }}'
      """
    Then the response status should be 201
    # Deploy response must echo the unrendered template body.
    And the response body should contain template literal:
      """
      {{ secret "tpl-llm-proxy-token" }}
      """

    # GET response must echo the unrendered template body.
    Given I authenticate using basic auth as "admin"
    When I send a GET request to the "gateway-controller" service at "/llm-proxies/tpl-llm-proxy"
    Then the response status code should be 200
    And the response body should contain template literal:
      """
      {{ secret "tpl-llm-proxy-token" }}
      """

    # DB must persist the unrendered template body.
    And the stored LlmProxy configuration for "tpl-llm-proxy" should contain:
      """
      {{ secret "tpl-llm-proxy-token" }}
      """

    # Runtime: upstream must receive the resolved X-Auth-Token header value
    And I wait for the endpoint "http://localhost:8080/tpl-llm-proxy/chat/completions" to be ready with method "POST" and body '{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}'
    When I set header "Content-Type" to "application/json"
    And I send a POST request to "http://localhost:8080/tpl-llm-proxy/chat/completions" with body:
      """
      {
        "model": "gpt-4",
        "messages": [{"role": "user", "content": "Hello"}]
      }
      """
    Then the response status code should be 200
    And the response should contain echoed header "X-Auth-Token" with value "Bearer llm-proxy-secret-456"

    # Cleanup
    Given I authenticate using basic auth as "admin"
    When I send a DELETE request to the "gateway-controller" service at "/llm-proxies/tpl-llm-proxy"
    Then the response should be successful
    When I delete the LLM provider "tpl-llm-proxy-provider"
    Then the response status code should be 200
    When I delete the secret "tpl-llm-proxy-token"
    Then the response status should be 200

  # Verifies the template-rendering contract for Mcp: an {{ env ... | default }}
  # expression in the upstream URL stays unrendered in responses and the DB,
  # while at runtime the URL resolves to the bare backend host (the env var is
  # deliberately absent, so the default empty string applies).
  Scenario: env template in McpProxy upstream URL resolves at runtime but is unrendered in response and DB
    When I deploy this MCP configuration:
      """
      apiVersion: gateway.api-platform.wso2.com/v1alpha1
      kind: Mcp
      metadata:
        name: tpl-mcp-v1.0
      spec:
        displayName: Tpl-Mcp
        version: v1.0
        context: /tpl-mcp
        specVersion: "2025-06-18"
        upstream:
          url: 'http://mcp-server-backend:3001{{ env "IT_DEFINITELY_MISSING_KEY" | default "" }}'
        tools: []
        resources: []
        prompts: []
      """
    Then the response should be successful
    # Deploy response must echo the unrendered template body.
    And the response body should contain template literal:
      """
      {{ env "IT_DEFINITELY_MISSING_KEY" | default "" }}
      """

    # GET response must echo the unrendered template body.
    Given I authenticate using basic auth as "admin"
    When I send a GET request to the "gateway-controller" service at "/mcp-proxies/tpl-mcp-v1.0"
    Then the response status code should be 200
    And the response body should contain template literal:
      """
      {{ env "IT_DEFINITELY_MISSING_KEY" | default "" }}
      """

    # DB must persist the unrendered template body.
    And the stored Mcp configuration for "tpl-mcp-v1.0" should contain:
      """
      {{ env "IT_DEFINITELY_MISSING_KEY" | default "" }}
      """

    # Runtime: upstream URL must have resolved to the bare mcp-server-backend host
    And I wait for 2 seconds
    When I use the MCP Client to send an initialize request to "http://127.0.0.1:8080/tpl-mcp/mcp"
    Then the response should be successful
    # NOTE(review): the "add" arguments (40, 60) presumably come from the MCP
    # client step's defaults — confirm against the step definition.
    When I use the MCP Client to send "add" tools/call request to "http://127.0.0.1:8080/tpl-mcp/mcp"
    Then the response should be successful
    And the response should be valid JSON
    And the JSON response field "result.content[0].text" should contain "The sum of 40 and 60 is 100."

    # Cleanup
    Given I authenticate using basic auth as "admin"
    When I delete the MCP proxy "tpl-mcp-v1.0"
    Then the response should be successful

Scenario: missing secret reference fails with 400 at deploy time
When I deploy this API configuration:
"""
Expand Down
34 changes: 25 additions & 9 deletions gateway/it/steps_template.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,17 @@ func RegisterTemplateSteps(ctx *godog.ScenarioContext, state *TestState, httpSte
t := NewTemplateSteps(state, httpSteps)

ctx.Step(`^the response body should contain template literal:$`, t.responseBodyShouldContainLiteral)
ctx.Step(`^the stored RestApi configuration for "([^"]*)" should contain:$`, t.storedRestAPIShouldContain)
ctx.Step(`^the stored (RestApi|LlmProvider|LlmProxy|Mcp) configuration for "([^"]*)" should contain:$`, t.storedConfigurationShouldContain)
}

// kindTables maps the Gherkin-facing artifact kind to the per-kind storage
// table. The values mirror the schemas in
// gateway-controller/pkg/storage/gateway-controller-db.sql.
// The keys must stay in sync with the kind alternation
// (RestApi|LlmProvider|LlmProxy|Mcp) in the step pattern registered by
// RegisterTemplateSteps.
var kindTables = map[string]string{
	"RestApi":     "rest_apis",
	"LlmProvider": "llm_providers",
	"LlmProxy":    "llm_proxies",
	"Mcp":         "mcp_proxies",
}

// responseBodyShouldContainLiteral checks that the last response body contains
Expand Down Expand Up @@ -77,25 +87,31 @@ func containsLiteralOrJSONEscaped(haystack, needle string) bool {
return jsonEscaped != needle && strings.Contains(haystack, jsonEscaped)
}

// storedRestAPIShouldContain queries the controller's SQLite DB via the
// it-db-reader sidecar and asserts the unrendered SourceConfiguration blob for
// the given RestApi handle contains the supplied literal. Used to verify that
// the persisted configuration retains template expressions verbatim.
func (t *TemplateSteps) storedRestAPIShouldContain(handle string, literal *godog.DocString) error {
// storedConfigurationShouldContain queries the controller's DB via the
// reader sidecar and asserts the unrendered configuration blob persisted for
// the given artifact kind/handle contains the supplied literal. Used to verify
// that the persisted configuration retains template expressions verbatim
// across all kinds that the renderer touches (RestApi, LlmProvider, LlmProxy,
// Mcp).
func (t *TemplateSteps) storedConfigurationShouldContain(kind, handle string, literal *godog.DocString) error {
expected := strings.TrimSpace(literal.Content)
if expected == "" {
return fmt.Errorf("expected literal is empty")
}
table, ok := kindTables[kind]
if !ok {
return fmt.Errorf("unknown artifact kind %q (supported: RestApi, LlmProvider, LlmProxy, Mcp)", kind)
}

ctx, cancel := context.WithTimeout(context.Background(), defaultDBQueryTimeout)
defer cancel()

row, err := GetStoredRestAPISourceConfigurationWithRetry(ctx, handle)
row, err := GetStoredSourceConfigurationWithRetry(ctx, kind, table, handle)
if err != nil {
return fmt.Errorf("failed to read stored configuration for %q: %w", handle, err)
return fmt.Errorf("failed to read stored %s configuration for %q: %w", kind, handle, err)
}
if !containsLiteralOrJSONEscaped(row, expected) {
return fmt.Errorf("stored configuration for %q does not contain expected template literal\nexpected substring: %q\nstored row: %s", handle, expected, row)
return fmt.Errorf("stored %s configuration for %q does not contain expected template literal\nexpected substring: %q\nstored row: %s", kind, handle, expected, row)
}
return nil
}
Loading