From ee26baf054ee29f6165fb65c745927ceacc2cc58 Mon Sep 17 00:00:00 2001 From: VinciWu557 <78317518+VinciWu557@users.noreply.github.com> Date: Tue, 3 Jun 2025 19:31:58 +0800 Subject: [PATCH] feat: support dify ai-proxy e2e test || feat: support dify ai-proxy e2e test (#2319) --- .../extensions/ai-proxy/provider/dify.go | 13 ++-- test/e2e/conformance/base/llm-mock.yaml | 2 +- .../e2e/conformance/tests/go-wasm-ai-proxy.go | 66 +++++++++++++++++++ .../conformance/tests/go-wasm-ai-proxy.yaml | 29 ++++++++ 4 files changed, 103 insertions(+), 7 deletions(-) diff --git a/plugins/wasm-go/extensions/ai-proxy/provider/dify.go b/plugins/wasm-go/extensions/ai-proxy/provider/dify.go index 93bfca2bb..2f0d10800 100644 --- a/plugins/wasm-go/extensions/ai-proxy/provider/dify.go +++ b/plugins/wasm-go/extensions/ai-proxy/provider/dify.go @@ -6,13 +6,13 @@ import ( "fmt" "net/http" "strings" - "time" + + "github.com/higress-group/proxy-wasm-go-sdk/proxywasm" + "github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types" "github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util" "github.com/alibaba/higress/plugins/wasm-go/pkg/log" "github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper" - "github.com/higress-group/proxy-wasm-go-sdk/proxywasm" - "github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types" ) const ( @@ -138,7 +138,7 @@ func (d *difyProvider) responseDify2OpenAI(ctx wrapper.HttpContext, response *Di } return &chatCompletionResponse{ Id: id, - Created: time.Now().UnixMilli() / 1000, + Created: response.CreatedAt, Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""), SystemFingerprint: "", Object: objectChatCompletion, @@ -222,7 +222,7 @@ func (d *difyProvider) streamResponseDify2OpenAI(ctx wrapper.HttpContext, respon } return &chatCompletionResponse{ Id: id, - Created: time.Now().UnixMilli() / 1000, + Created: response.CreatedAt, Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""), SystemFingerprint: "", Object: objectChatCompletionChunk, @@ -309,7 
+309,7 @@ type DifyChatResponse struct { ConversationId string `json:"conversation_id"` MessageId string `json:"message_id"` Answer string `json:"answer"` - CreateAt int64 `json:"create_at"` + CreatedAt int64 `json:"created_at"` Data DifyData `json:"data"` MetaData DifyMetaData `json:"metadata"` } @@ -319,6 +319,7 @@ type DifyChunkChatResponse struct { ConversationId string `json:"conversation_id"` MessageId string `json:"message_id"` Answer string `json:"answer"` + CreatedAt int64 `json:"created_at"` Data DifyData `json:"data"` MetaData DifyMetaData `json:"metadata"` } diff --git a/test/e2e/conformance/base/llm-mock.yaml b/test/e2e/conformance/base/llm-mock.yaml index f515a12b5..cedaa206e 100644 --- a/test/e2e/conformance/base/llm-mock.yaml +++ b/test/e2e/conformance/base/llm-mock.yaml @@ -29,7 +29,7 @@ metadata: spec: containers: - name: llm-mock - image: higress-registry.cn-hangzhou.cr.aliyuncs.com/higress/llm-mock:latest + image: higress-registry.cn-hangzhou.cr.aliyuncs.com/higress/llm-mock-server:latest ports: - containerPort: 3000 --- diff --git a/test/e2e/conformance/tests/go-wasm-ai-proxy.go b/test/e2e/conformance/tests/go-wasm-ai-proxy.go index 97a001f6f..4d7e8f02e 100644 --- a/test/e2e/conformance/tests/go-wasm-ai-proxy.go +++ b/test/e2e/conformance/tests/go-wasm-ai-proxy.go @@ -957,6 +957,72 @@ data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"?"}} data: [DONE] +`), + }, + }, + }, + { + Meta: http.AssertionMeta{ + TestCaseName: "dify case 1: non-streaming completion request", + CompareTarget: http.CompareTargetResponse, + }, + Request: http.AssertionRequest{ + ActualRequest: http.Request{ + Host: "api.dify.ai", + Path: "/v1/chat/completions", + Method: "POST", + ContentType: http.ContentTypeApplicationJson, + Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好"}],"stream":false}`), + }, + }, + Response: http.AssertionResponse{ + ExpectedResponse: http.Response{ + StatusCode: 200, + ContentType: 
http.ContentTypeApplicationJson, + Body: []byte(`{"id":"chatcmpl-llm-mock","choices":[{"index":0,"message":{"role":"assistant","content":"USER: \n你好\n"},"finish_reason":"stop"}],"created":10,"model":"dify","object":"chat.completion","usage":{"prompt_tokens":9,"completion_tokens":1,"total_tokens":10}}`), + }, + }, + }, + { + Meta: http.AssertionMeta{ + TestCaseName: "dify case 2: streaming completion request", + CompareTarget: http.CompareTargetResponse, + }, + Request: http.AssertionRequest{ + ActualRequest: http.Request{ + Host: "api.dify.ai", + Path: "/v1/chat/completions", + Method: "POST", + ContentType: http.ContentTypeApplicationJson, + Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好"}],"stream":true}`), + }, + }, + Response: http.AssertionResponse{ + ExpectedResponse: http.Response{ + StatusCode: 200, + ContentType: http.ContentTypeTextEventStream, + Body: []byte(`data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"U"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"S"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"E"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"R"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":":"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":" "}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: 
{"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"\n"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"你"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"好"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"\n"}}],"created":10,"model":"dify","object":"chat.completion.chunk","usage":{}} + +data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"role":"assistant","content":"USER: \n你好\n"},"finish_reason":"stop"}],"model":"dify","object":"chat.completion.chunk","usage":{"prompt_tokens":9,"completion_tokens":1,"total_tokens":10}} + `), }, }, diff --git a/test/e2e/conformance/tests/go-wasm-ai-proxy.yaml b/test/e2e/conformance/tests/go-wasm-ai-proxy.yaml index f64ec7728..d4025e6f7 100644 --- a/test/e2e/conformance/tests/go-wasm-ai-proxy.yaml +++ b/test/e2e/conformance/tests/go-wasm-ai-proxy.yaml @@ -315,6 +315,25 @@ spec: port: number: 3000 --- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: wasmplugin-ai-proxy-dify + namespace: higress-conformance-ai-backend +spec: + ingressClassName: higress + rules: + - host: "api.dify.ai" + http: + paths: + - pathType: Prefix + path: "/" + backend: + service: + name: llm-mock-service + port: + number: 3000 +--- apiVersion: extensions.higress.io/v1alpha1 kind: WasmPlugin metadata: @@ -493,4 +512,14 @@ spec: type: zhipuai ingress: - higress-conformance-ai-backend/wasmplugin-ai-proxy-zhipuai + - config: + provider: + apiTokens: + - fake_token + modelMapping: + '*': dify + type: dify + botType: Completion + ingress: + - higress-conformance-ai-backend/wasmplugin-ai-proxy-dify url: 
file:///opt/plugins/wasm-go/extensions/ai-proxy/plugin.wasm \ No newline at end of file