From 9fa3a730d54c2c5694626148d1e0932cc1f94516 Mon Sep 17 00:00:00 2001
From: Kent Dong
Date: Tue, 18 Mar 2025 10:23:34 +0800
Subject: [PATCH] feat: Support forwarding embedding calls to Ollama in
 ai-proxy (#1913)

---
 plugins/wasm-go/extensions/ai-proxy/provider/ollama.go | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/plugins/wasm-go/extensions/ai-proxy/provider/ollama.go b/plugins/wasm-go/extensions/ai-proxy/provider/ollama.go
index b43843a86..0eef02b14 100644
--- a/plugins/wasm-go/extensions/ai-proxy/provider/ollama.go
+++ b/plugins/wasm-go/extensions/ai-proxy/provider/ollama.go
@@ -29,6 +29,7 @@ func (m *ollamaProviderInitializer) DefaultCapabilities() map[string]string {
 	return map[string]string{
 		// ollama的chat接口path和OpenAI的chat接口一样
 		string(ApiNameChatCompletion): PathOpenAIChatCompletions,
+		string(ApiNameEmbeddings):     PathOpenAIEmbeddings,
 	}
 }
 
@@ -60,7 +61,7 @@ func (m *ollamaProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiNa
 func (m *ollamaProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
 	if !m.config.isSupportedAPI(apiName) {
-		return types.ActionContinue, errUnsupportedApiName
+		return types.ActionContinue, nil
 	}
 	return m.config.handleRequestBody(m, m.contextCache, ctx, apiName, body, log)
 }