#1736 Implement cloudflare e2e test case (#2998)

This commit is contained in:
Patrisam
2025-10-20 19:47:36 +08:00
committed by GitHub
parent a7cd4c0ad6
commit 817cd322ff
2 changed files with 89 additions and 0 deletions

View File

@@ -777,6 +777,66 @@ data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":""},
data: [DONE]
`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "cloudflare case 1: non-streaming request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "api.cloudflare.com",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":false}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"id":"chatcmpl-llm-mock","choices":[{"index":0,"message":{"role":"assistant","content":"你好,你是谁?"},"finish_reason":"stop","logprobs":null}],"created":10,"model":"meta/llama-3.1-8b-instruct","object":"chat.completion","usage":{"prompt_tokens":9,"completion_tokens":1,"total_tokens":10}}`),
},
},
},
{
Meta: http.AssertionMeta{
TestCaseName: "cloudflare case 2: streaming request",
CompareTarget: http.CompareTargetResponse,
},
Request: http.AssertionRequest{
ActualRequest: http.Request{
Host: "api.cloudflare.com",
Path: "/v1/chat/completions",
Method: "POST",
ContentType: http.ContentTypeApplicationJson,
Body: []byte(`{"model":"gpt-3","messages":[{"role":"user","content":"你好,你是谁?"}],"stream":true}`),
},
},
Response: http.AssertionResponse{
ExpectedResponse: http.Response{
StatusCode: 200,
ContentType: http.ContentTypeTextEventStream,
Body: []byte(`data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"你"},"finish_reason":null,"logprobs":null}],"created":10,"model":"meta/llama-3.1-8b-instruct","object":"chat.completion.chunk","usage":null}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"好"},"finish_reason":null,"logprobs":null}],"created":10,"model":"meta/llama-3.1-8b-instruct","object":"chat.completion.chunk","usage":null}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":""},"finish_reason":null,"logprobs":null}],"created":10,"model":"meta/llama-3.1-8b-instruct","object":"chat.completion.chunk","usage":null}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"你"},"finish_reason":null,"logprobs":null}],"created":10,"model":"meta/llama-3.1-8b-instruct","object":"chat.completion.chunk","usage":null}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"是"},"finish_reason":null,"logprobs":null}],"created":10,"model":"meta/llama-3.1-8b-instruct","object":"chat.completion.chunk","usage":null}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":"谁"},"finish_reason":null,"logprobs":null}],"created":10,"model":"meta/llama-3.1-8b-instruct","object":"chat.completion.chunk","usage":null}
data: {"id":"chatcmpl-llm-mock","choices":[{"index":0,"delta":{"content":""},"finish_reason":"stop","logprobs":null}],"created":10,"model":"meta/llama-3.1-8b-instruct","object":"chat.completion.chunk","usage":null}
data: [DONE]
`),
},
},

View File

@@ -260,6 +260,25 @@ spec:
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: wasmplugin-ai-proxy-cloudflare
namespace: higress-conformance-ai-backend
spec:
ingressClassName: higress
rules:
- host: "api.cloudflare.com"
http:
paths:
- pathType: Prefix
path: "/"
backend:
service:
name: llm-mock-service
port:
number: 3000
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: wasmplugin-ai-proxy-together-ai
namespace: higress-conformance-ai-backend
@@ -520,6 +539,16 @@ spec:
type: stepfun
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-stepfun
- config:
provider:
apiTokens:
- fake_token
modelMapping:
"gpt-3": meta/llama-3.1-8b-instruct
"*": meta/llama-3.1-70b-instruct
type: cloudflare
ingress:
- higress-conformance-ai-backend/wasmplugin-ai-proxy-cloudflare
- config:
provider:
apiTokens: