mirror of
https://github.com/alibaba/higress.git
synced 2026-03-08 02:30:56 +08:00
Compare commits
90 Commits
plugins/wa
...
wasm-go-ex
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
04a9104062 | ||
|
|
564f8c770a | ||
|
|
fec2e9dfc9 | ||
|
|
dc4ddb52ee | ||
|
|
6f221ead53 | ||
|
|
53f8410843 | ||
|
|
a17ac9e4c6 | ||
|
|
5e95f6f057 | ||
|
|
94f29e56c0 | ||
|
|
870157c576 | ||
|
|
c78ef7011d | ||
|
|
dc0dcaaaee | ||
|
|
34f5722d93 | ||
|
|
55fdddee2f | ||
|
|
980ffde244 | ||
|
|
0a578c2a04 | ||
|
|
536a3069a8 | ||
|
|
08c64ed467 | ||
|
|
cc74c0da93 | ||
|
|
210b97b06b | ||
|
|
bccfbde621 | ||
|
|
f1c6e78047 | ||
|
|
1c415c60c3 | ||
|
|
59acb61926 | ||
|
|
29079f4e2a | ||
|
|
95edce024d | ||
|
|
b6d07a157c | ||
|
|
10569f49ae | ||
|
|
2a588c99c7 | ||
|
|
0cfef34bff | ||
|
|
5c2b5d5750 | ||
|
|
8f483518a9 | ||
|
|
f6ee4ed166 | ||
|
|
9a9e924037 | ||
|
|
e7d66f691f | ||
|
|
8c48fcb423 | ||
|
|
ef31e09310 | ||
|
|
c0f2cafdc8 | ||
|
|
d5a9ff3a98 | ||
|
|
f069ad5b0d | ||
|
|
85219b6c53 | ||
|
|
5041277be3 | ||
|
|
c00c8827f9 | ||
|
|
46218058d1 | ||
|
|
5306385e6b | ||
|
|
4e881fdd3f | ||
|
|
59aa3b5488 | ||
|
|
c40cf85aad | ||
|
|
7c749b864c | ||
|
|
74ddbf02f6 | ||
|
|
60c56a16ab | ||
|
|
5a2c6835f7 | ||
|
|
12a5612450 | ||
|
|
b9f5c4d1f2 | ||
|
|
d7bdcbd026 | ||
|
|
dd284d1f24 | ||
|
|
a7ee523c98 | ||
|
|
4bbfb131ee | ||
|
|
6fd71f9749 | ||
|
|
e0159f501a | ||
|
|
56226d5052 | ||
|
|
086a9cc973 | ||
|
|
e389313aa3 | ||
|
|
f64c601264 | ||
|
|
9c6ea109f8 | ||
|
|
4ca2d23404 | ||
|
|
0ce52de59b | ||
|
|
81e459da01 | ||
|
|
63539ca15c | ||
|
|
1eea75f130 | ||
|
|
d333656cc3 | ||
|
|
51dca7055a | ||
|
|
ab1bc0a73a | ||
|
|
ffee7dc5ea | ||
|
|
1ea87f0e7a | ||
|
|
7164653446 | ||
|
|
2a1a391054 | ||
|
|
0785d4aac4 | ||
|
|
4ca4bec2b5 | ||
|
|
174350d3fb | ||
|
|
0380cb03d3 | ||
|
|
15d9f76ff9 | ||
|
|
5f15017963 | ||
|
|
634de3f7f8 | ||
|
|
12cc44b324 | ||
|
|
d53c713561 | ||
|
|
5acc6f73b2 | ||
|
|
2db0b60a98 | ||
|
|
c6e3db95e0 | ||
|
|
ed976c6d06 |
114
.github/workflows/build-and-push-wasm-plugin-image.yaml
vendored
Normal file
114
.github/workflows/build-and-push-wasm-plugin-image.yaml
vendored
Normal file
@@ -0,0 +1,114 @@
|
||||
name: Build and Push Wasm Plugin Image
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "wasm-go-*-v*.*.*" # 匹配 wasm-go-{pluginName}-vX.Y.Z 格式的标签
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
plugin_name:
|
||||
description: 'Name of the plugin'
|
||||
required: true
|
||||
type: string
|
||||
version:
|
||||
description: 'Version of the plugin (optional, without leading v)'
|
||||
required: false
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
build-and-push-wasm-plugin-image:
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: image-registry-msg
|
||||
env:
|
||||
IMAGE_REGISTRY_SERVICE: ${{ vars.IMAGE_REGISTRY || 'higress-registry.cn-hangzhou.cr.aliyuncs.com' }}
|
||||
IMAGE_REPOSITORY: ${{ vars.PLUGIN_IMAGE_REPOSITORY || 'plugins' }}
|
||||
GO_VERSION: 1.19
|
||||
TINYGO_VERSION: 0.28.1
|
||||
ORAS_VERSION: 1.0.0
|
||||
steps:
|
||||
- name: Set plugin_name and version from inputs or ref_name
|
||||
id: set_vars
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||
plugin_name="${{ github.event.inputs.plugin_name }}"
|
||||
version="${{ github.event.inputs.version }}"
|
||||
else
|
||||
ref_name=${{ github.ref_name }}
|
||||
plugin_name=${ref_name#*-*-} # 删除插件名前面的字段(wasm-go-)
|
||||
plugin_name=${plugin_name%-*} # 删除插件名后面的字段(-vX.Y.Z)
|
||||
version=$(echo "$ref_name" | awk -F'v' '{print $2}')
|
||||
fi
|
||||
|
||||
echo "PLUGIN_NAME=$plugin_name" >> $GITHUB_ENV
|
||||
echo "VERSION=$version" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: File Check
|
||||
run: |
|
||||
workspace=${{ github.workspace }}/plugins/wasm-go/extensions/${PLUGIN_NAME}
|
||||
push_command="./plugin.tar.gz:application/vnd.oci.image.layer.v1.tar+gzip"
|
||||
|
||||
# 查找spec.yaml
|
||||
if [ -f "${workspace}/spec.yaml" ]; then
|
||||
echo "spec.yaml exists"
|
||||
push_command="./spec.yaml:application/vnd.module.wasm.spec.v1+yaml $push_command "
|
||||
fi
|
||||
|
||||
# 查找README.md
|
||||
if [ -f "${workspace}/README.md" ];then
|
||||
echo "README.md exists"
|
||||
push_command="./README.md:application/vnd.module.wasm.doc.v1+markdown $push_command "
|
||||
fi
|
||||
|
||||
# 查找README_{lang}.md
|
||||
for file in ${workspace}/README_*.md; do
|
||||
if [ -f "$file" ]; then
|
||||
file_name=$(basename $file)
|
||||
echo "$file_name exists"
|
||||
lang=$(basename $file | sed 's/README_//; s/.md//')
|
||||
push_command="./$file_name:application/vnd.module.wasm.doc.v1.$lang+markdown $push_command "
|
||||
fi
|
||||
done
|
||||
|
||||
echo "PUSH_COMMAND=\"$push_command\"" >> $GITHUB_ENV
|
||||
|
||||
- name: Run a wasm-go-builder
|
||||
env:
|
||||
PLUGIN_NAME: ${{ env.PLUGIN_NAME }}
|
||||
BUILDER_IMAGE: higress-registry.cn-hangzhou.cr.aliyuncs.com/plugins/wasm-go-builder:go${{ env.GO_VERSION }}-tinygo${{ env.TINYGO_VERSION }}-oras${{ env.ORAS_VERSION }}
|
||||
run: |
|
||||
docker run -itd --name builder -v ${{ github.workspace }}:/workspace -e PLUGIN_NAME=${{ env.PLUGIN_NAME }} --rm ${{ env.BUILDER_IMAGE }} /bin/bash
|
||||
|
||||
- name: Build Image and Push
|
||||
run: |
|
||||
push_command=${{ env.PUSH_COMMAND }}
|
||||
push_command=${push_command#\"}
|
||||
push_command=${push_command%\"} # 删除PUSH_COMMAND中的双引号,确保oras push正常解析
|
||||
|
||||
target_image="${{ env.IMAGE_REGISTRY_SERVICE }}/${{ env.IMAGE_REPOSITORY}}/${{ env.PLUGIN_NAME }}:${{ env.VERSION }}"
|
||||
echo "TargetImage=${target_image}"
|
||||
|
||||
cd ${{ github.workspace }}/plugins/wasm-go/extensions/${PLUGIN_NAME}
|
||||
if [ -f ./.buildrc ]; then
|
||||
echo 'Found .buildrc file, sourcing it...'
|
||||
. ./.buildrc
|
||||
else
|
||||
echo '.buildrc file not found'
|
||||
fi
|
||||
echo "EXTRA_TAGS=${EXTRA_TAGS}"
|
||||
|
||||
command="
|
||||
set -e
|
||||
cd /workspace/plugins/wasm-go/extensions/${PLUGIN_NAME}
|
||||
go mod tidy
|
||||
tinygo build -o ./plugin.wasm -scheduler=none -target=wasi -gc=custom -tags=\"custommalloc nottinygc_finalizer ${EXTRA_TAGS}\" .
|
||||
tar czvf plugin.tar.gz plugin.wasm
|
||||
echo ${{ secrets.REGISTRY_PASSWORD }} | oras login -u ${{ secrets.REGISTRY_USERNAME }} --password-stdin ${{ env.IMAGE_REGISTRY_SERVICE }}
|
||||
oras push ${target_image} ${push_command}
|
||||
"
|
||||
docker exec builder bash -c "$command"
|
||||
|
||||
|
||||
16
.github/workflows/build-and-test-plugin.yaml
vendored
16
.github/workflows/build-and-test-plugin.yaml
vendored
@@ -17,8 +17,8 @@ jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.19
|
||||
# There are too many lint errors in current code bases
|
||||
@@ -30,9 +30,9 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
# TODO(Xunzhuo): Enable C WASM Filters in CI
|
||||
wasmPluginType: [ GO ]
|
||||
wasmPluginType: [ GO, RUST ]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Free Up GitHub Actions Ubuntu Runner Disk Space 🔧
|
||||
uses: jlumbroso/free-disk-space@main
|
||||
@@ -45,12 +45,12 @@ jobs:
|
||||
swap-storage: true
|
||||
|
||||
- name: "Setup Go"
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.19
|
||||
|
||||
- name: Setup Golang Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
~/.cache/go-build
|
||||
@@ -60,7 +60,7 @@ jobs:
|
||||
${{ runner.os }}-go
|
||||
|
||||
- name: Setup Submodule Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
.git/modules
|
||||
@@ -81,4 +81,4 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ higress-wasmplugin-test ]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
30
.github/workflows/build-and-test.yaml
vendored
30
.github/workflows/build-and-test.yaml
vendored
@@ -10,8 +10,8 @@ jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.19
|
||||
# There are too many lint errors in current code bases
|
||||
@@ -21,10 +21,10 @@ jobs:
|
||||
coverage-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Golang Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
~/.cache/go-build
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
restore-keys: ${{ runner.os }}-go
|
||||
|
||||
- name: Setup Submodule Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
.git/modules
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
- name: Run Coverage Tests
|
||||
run: GOPROXY="https://proxy.golang.org,direct" make go.test.coverage
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v3
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
fail_ci_if_error: false
|
||||
files: ./coverage.xml
|
||||
@@ -58,17 +58,17 @@ jobs:
|
||||
needs: [lint,coverage-test]
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }}"
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: "Setup Go"
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.19
|
||||
|
||||
- name: Setup Golang Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
~/.cache/go-build
|
||||
@@ -77,7 +77,7 @@ jobs:
|
||||
restore-keys: ${{ runner.os }}-go
|
||||
|
||||
- name: Setup Submodule Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
.git/modules
|
||||
@@ -90,7 +90,7 @@ jobs:
|
||||
run: GOPROXY="https://proxy.golang.org,direct" make build
|
||||
|
||||
- name: Upload Higress Binary
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: higress
|
||||
path: out/
|
||||
@@ -108,12 +108,12 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: "Setup Go"
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.19
|
||||
|
||||
- name: Setup Golang Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
~/.cache/go-build
|
||||
@@ -123,7 +123,7 @@ jobs:
|
||||
${{ runner.os }}-go
|
||||
|
||||
- name: Setup Submodule Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
.git/modules
|
||||
@@ -139,4 +139,4 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [higress-conformance-test,gateway-conformance-test]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
36
.github/workflows/build-image-and-push.yaml
vendored
36
.github/workflows/build-image-and-push.yaml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
CONTROLLER_IMAGE_NAME: ${{ vars.CONTROLLER_IMAGE_NAME || 'higress/higress' }}
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }}"
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
@@ -31,12 +31,12 @@ jobs:
|
||||
swap-storage: true
|
||||
|
||||
- name: "Setup Go"
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.19
|
||||
|
||||
- name: Setup Golang Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
~/.cache/go-build
|
||||
@@ -45,7 +45,7 @@ jobs:
|
||||
restore-keys: ${{ runner.os }}-go
|
||||
|
||||
- name: Setup Submodule Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
envoy
|
||||
@@ -56,7 +56,7 @@ jobs:
|
||||
|
||||
- name: Calculate Docker metadata
|
||||
id: docker-meta
|
||||
uses: docker/metadata-action@v4
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.CONTROLLER_IMAGE_REGISTRY }}/${{ env.CONTROLLER_IMAGE_NAME }}
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
|
||||
|
||||
- name: Login to Docker Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.CONTROLLER_IMAGE_REGISTRY }}
|
||||
username: ${{ secrets.REGISTRY_USERNAME }}
|
||||
@@ -92,7 +92,7 @@ jobs:
|
||||
PILOT_IMAGE_NAME: ${{ vars.PILOT_IMAGE_NAME || 'higress/pilot' }}
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }}"
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
@@ -107,12 +107,12 @@ jobs:
|
||||
swap-storage: true
|
||||
|
||||
- name: "Setup Go"
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.19
|
||||
|
||||
- name: Setup Golang Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
~/.cache/go-build
|
||||
@@ -121,7 +121,7 @@ jobs:
|
||||
restore-keys: ${{ runner.os }}-go
|
||||
|
||||
- name: Setup Submodule Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
envoy
|
||||
@@ -132,7 +132,7 @@ jobs:
|
||||
|
||||
- name: Calculate Docker metadata
|
||||
id: docker-meta
|
||||
uses: docker/metadata-action@v4
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.PILOT_IMAGE_REGISTRY }}/${{ env.PILOT_IMAGE_NAME }}
|
||||
@@ -143,7 +143,7 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
|
||||
|
||||
- name: Login to Docker Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.PILOT_IMAGE_REGISTRY }}
|
||||
username: ${{ secrets.REGISTRY_USERNAME }}
|
||||
@@ -169,7 +169,7 @@ jobs:
|
||||
GATEWAY_IMAGE_NAME: ${{ vars.GATEWAY_IMAGE_NAME || 'higress/gateway' }}
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }}"
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
@@ -184,12 +184,12 @@ jobs:
|
||||
swap-storage: true
|
||||
|
||||
- name: "Setup Go"
|
||||
uses: actions/setup-go@v3
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.19
|
||||
|
||||
- name: Setup Golang Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
~/.cache/go-build
|
||||
@@ -198,7 +198,7 @@ jobs:
|
||||
restore-keys: ${{ runner.os }}-go
|
||||
|
||||
- name: Setup Submodule Caches
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |-
|
||||
envoy
|
||||
@@ -209,7 +209,7 @@ jobs:
|
||||
|
||||
- name: Calculate Docker metadata
|
||||
id: docker-meta
|
||||
uses: docker/metadata-action@v4
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.GATEWAY_IMAGE_REGISTRY }}/${{ env.GATEWAY_IMAGE_NAME }}
|
||||
@@ -220,7 +220,7 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
|
||||
|
||||
- name: Login to Docker Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.GATEWAY_IMAGE_REGISTRY }}
|
||||
username: ${{ secrets.REGISTRY_USERNAME }}
|
||||
|
||||
8
.github/workflows/codeql-analysis.yaml
vendored
8
.github/workflows/codeql-analysis.yaml
vendored
@@ -34,11 +34,11 @@ jobs:
|
||||
steps:
|
||||
# step 1
|
||||
- name: "Checkout repository"
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# step 2: Initializes the CodeQL tools for scanning.
|
||||
- name: "Initialize CodeQL"
|
||||
uses: github/codeql-action/init@v1
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@@ -50,7 +50,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: "Autobuild"
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# step 4
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
@@ -66,4 +66,4 @@ jobs:
|
||||
|
||||
# step 5
|
||||
- name: "Perform CodeQL Analysis"
|
||||
uses: github/codeql-action/analyze@v1
|
||||
uses: github/codeql-action/analyze@v2
|
||||
|
||||
@@ -14,7 +14,7 @@ jobs:
|
||||
steps:
|
||||
# Step 1
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
# Step 2
|
||||
- id: package
|
||||
name: Prepare Standalone Package
|
||||
|
||||
2
.github/workflows/deploy-to-oss.yaml
vendored
2
.github/workflows/deploy-to-oss.yaml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
steps:
|
||||
# Step 1
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
# Step 2
|
||||
- name: Download Helm Charts Index
|
||||
uses: doggycool/ossutil-github-action@master
|
||||
|
||||
4
.github/workflows/latest-release.yaml
vendored
4
.github/workflows/latest-release.yaml
vendored
@@ -9,7 +9,7 @@ jobs:
|
||||
latest-release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Build hgctl latest multiarch binaries
|
||||
run: |
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
GITHUB_REPOSITORY: ${{ github.repository_owner }}/${{ github.event.repository.name }}
|
||||
|
||||
- name: Recreate the Latest Release and Tag
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: false
|
||||
prerelease: true
|
||||
|
||||
4
.github/workflows/license-checker.yaml
vendored
4
.github/workflows/license-checker.yaml
vendored
@@ -10,7 +10,7 @@ jobs:
|
||||
steps:
|
||||
# step 1
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2.4.0
|
||||
uses: actions/checkout@v4
|
||||
# step 2
|
||||
- name: Check License Header
|
||||
uses: apache/skywalking-eyes/header@25edfc2fd8d52fb266653fb5f6c42da633d85c07
|
||||
@@ -24,4 +24,4 @@ jobs:
|
||||
with:
|
||||
log: info
|
||||
config: .licenserc.yaml
|
||||
mode: check
|
||||
mode: check
|
||||
|
||||
6
.github/workflows/release-hgctl.yaml
vendored
6
.github/workflows/release-hgctl.yaml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
env:
|
||||
HGCTL_VERSION: ${{github.ref_name}}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Build hgctl latest multiarch binaries
|
||||
run: |
|
||||
@@ -25,7 +25,7 @@ jobs:
|
||||
zip -q -r hgctl_${{ env.HGCTL_VERSION }}_windows_arm64.zip out/windows_arm64/
|
||||
|
||||
- name: Upload hgctl packages to the GitHub release
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
files: |
|
||||
@@ -34,4 +34,4 @@ jobs:
|
||||
hgctl_${{ env.HGCTL_VERSION }}_darwin_amd64.tar.gz
|
||||
hgctl_${{ env.HGCTL_VERSION }}_darwin_arm64.tar.gz
|
||||
hgctl_${{ env.HGCTL_VERSION }}_windows_amd64.zip
|
||||
hgctl_${{ env.HGCTL_VERSION }}_windows_arm64.zip
|
||||
hgctl_${{ env.HGCTL_VERSION }}_windows_arm64.zip
|
||||
|
||||
@@ -138,11 +138,11 @@ export ENVOY_TAR_PATH:=/home/package/envoy.tar.gz
|
||||
|
||||
external/package/envoy-amd64.tar.gz:
|
||||
# cd external/proxy; BUILD_WITH_CONTAINER=1 make test_release
|
||||
cd external/package; wget -O envoy-amd64.tar.gz "https://github.com/alibaba/higress/releases/download/v1.4.0-rc.1/envoy-symbol-amd64.tar.gz"
|
||||
cd external/package; wget -O envoy-amd64.tar.gz "https://github.com/alibaba/higress/releases/download/v1.4.1/envoy-symbol-amd64.tar.gz"
|
||||
|
||||
external/package/envoy-arm64.tar.gz:
|
||||
# cd external/proxy; BUILD_WITH_CONTAINER=1 make test_release
|
||||
cd external/package; wget -O envoy-arm64.tar.gz "https://github.com/alibaba/higress/releases/download/v1.4.0-rc.1/envoy-symbol-arm64.tar.gz"
|
||||
cd external/package; wget -O envoy-arm64.tar.gz "https://github.com/alibaba/higress/releases/download/v1.4.1/envoy-symbol-arm64.tar.gz"
|
||||
|
||||
build-pilot:
|
||||
cd external/istio; rm -rf out/linux_amd64; GOOS_LOCAL=linux TARGET_OS=linux TARGET_ARCH=amd64 BUILD_WITH_CONTAINER=1 make build-linux
|
||||
@@ -177,8 +177,8 @@ install: pre-install
|
||||
cd helm/higress; helm dependency build
|
||||
helm install higress helm/higress -n higress-system --create-namespace --set 'global.local=true'
|
||||
|
||||
ENVOY_LATEST_IMAGE_TAG ?= sha-93966bf
|
||||
ISTIO_LATEST_IMAGE_TAG ?= sha-b00f79f
|
||||
ENVOY_LATEST_IMAGE_TAG ?= sha-59acb61
|
||||
ISTIO_LATEST_IMAGE_TAG ?= sha-59acb61
|
||||
|
||||
install-dev: pre-install
|
||||
helm install higress helm/core -n higress-system --create-namespace --set 'controller.tag=$(TAG)' --set 'gateway.replicas=1' --set 'pilot.tag=$(ISTIO_LATEST_IMAGE_TAG)' --set 'gateway.tag=$(ENVOY_LATEST_IMAGE_TAG)' --set 'global.local=true'
|
||||
|
||||
59
README.md
59
README.md
@@ -1,17 +1,17 @@
|
||||
<h1 align="center">
|
||||
<img src="https://img.alicdn.com/imgextra/i2/O1CN01NwxLDd20nxfGBjxmZ_!!6000000006895-2-tps-960-290.png" alt="Higress" width="240" height="72.5">
|
||||
<br>
|
||||
Cloud Native API Gateway
|
||||
AI Native API Gateway
|
||||
</h1>
|
||||
|
||||
[](https://github.com/alibaba/higress/actions)
|
||||
[](https://www.apache.org/licenses/LICENSE-2.0.html)
|
||||
|
||||
[**官网**](https://higress.io/) |
|
||||
[**文档**](https://higress.io/zh-cn/docs/overview/what-is-higress) |
|
||||
[**博客**](https://higress.io/zh-cn/blog) |
|
||||
[**开发指引**](https://higress.io/zh-cn/docs/developers/developers_dev) |
|
||||
[**Higress 企业版**](https://www.aliyun.com/product/aliware/mse?spm=higress-website.topbar.0.0.0)
|
||||
[**文档**](https://higress.io/docs/latest/user/quickstart/) |
|
||||
[**博客**](https://higress.io/blog/) |
|
||||
[**开发指引**](https://higress.io/docs/latest/dev/architecture/) |
|
||||
[**AI插件**](https://higress.io/plugin/)
|
||||
|
||||
|
||||
<p>
|
||||
@@ -19,9 +19,15 @@
|
||||
</p>
|
||||
|
||||
|
||||
Higress 是基于阿里内部两年多的 Envoy Gateway 实践沉淀,以开源 [Istio](https://github.com/istio/istio) 与 [Envoy](https://github.com/envoyproxy/envoy) 为核心构建的云原生 API 网关。Higress 实现了安全防护网关、流量网关、微服务网关三层网关合一,可以显著降低网关的部署和运维成本。
|
||||
Higress 是基于阿里内部多年的 Envoy Gateway 实践沉淀,以开源 [Istio](https://github.com/istio/istio) 与 [Envoy](https://github.com/envoyproxy/envoy) 为核心构建的云原生 API 网关。
|
||||
|
||||
Higress 是面向 AI 原生设计的 API 网关,在阿里内部,承载了通义千问 APP、百炼大模型 API、机器学习 PAI 平台等 AI 业务的流量。
|
||||
|
||||
Higress 能够用统一的协议对接国内外所有 LLM 模型厂商,同时具备丰富的 AI 可观测、多模型负载均衡/fallback、AI token 流控、AI 缓存等能力:
|
||||
|
||||

|
||||
|
||||
|
||||

|
||||
|
||||
## Summary
|
||||
|
||||
@@ -34,6 +40,10 @@ Higress 是基于阿里内部两年多的 Envoy Gateway 实践沉淀,以开源
|
||||
|
||||
## 使用场景
|
||||
|
||||
- **AI 网关**:
|
||||
|
||||
Higress 提供了一站式的 AI 插件集,可以增强依赖 AI 能力业务的稳定性、灵活性、可观测性,使得业务与 AI 的集成更加便捷和高效。
|
||||
|
||||
- **Kubernetes Ingress 网关**:
|
||||
|
||||
Higress 可以作为 K8s 集群的 Ingress 入口网关, 并且兼容了大量 K8s Nginx Ingress 的注解,可以从 K8s Nginx Ingress 快速平滑迁移到 Higress。
|
||||
@@ -56,27 +66,36 @@ Higress 是基于阿里内部两年多的 Envoy Gateway 实践沉淀,以开源
|
||||
|
||||
脱胎于阿里巴巴2年多生产验证的内部产品,支持每秒请求量达数十万级的大规模场景。
|
||||
|
||||
彻底摆脱 reload 引起的流量抖动,配置变更毫秒级生效且业务无感。
|
||||
|
||||
- **平滑演进**
|
||||
彻底摆脱 Nginx reload 引起的流量抖动,配置变更毫秒级生效且业务无感。对 AI 业务等长连接场景特别友好。
|
||||
|
||||
支持 Nacos/Zookeeper/Eureka 等多种注册中心,可以不依赖 K8s Service 进行服务发现,支持非容器架构平滑演进到云原生架构。
|
||||
- **流式处理**
|
||||
|
||||
支持从 Nginx Ingress Controller 平滑迁移,支持平滑过渡到 Gateway API,支持业务架构平滑演进到 ServiceMesh。
|
||||
支持真正的完全流式处理请求/响应 Body,Wasm 插件很方便地自定义处理 SSE (Server-Sent Events)等流式协议的报文。
|
||||
|
||||
- **兼收并蓄**
|
||||
|
||||
兼容 Nginx Ingress Annotation 80%+ 的使用场景,且提供功能更丰富的 Higress Annotation 注解。
|
||||
|
||||
兼容 Ingress API/Gateway API/Istio API,可以组合多种 CRD 实现流量精细化管理。
|
||||
|
||||
在 AI 业务等大带宽场景下,可以显著降低内存开销。
|
||||
|
||||
- **便于扩展**
|
||||
|
||||
提供 Wasm、Lua、进程外三种插件扩展机制,支持多语言编写插件,生效粒度支持全局级、域名级,路由级。
|
||||
提供丰富的官方插件库,涵盖 AI、流量管理、安全防护等常用功能,满足90%以上的业务场景需求。
|
||||
|
||||
主打 Wasm 插件扩展,通过沙箱隔离确保内存安全,支持多种编程语言,允许插件版本独立升级,实现流量无损热更新网关逻辑。
|
||||
|
||||
- **安全易用**
|
||||
|
||||
基于 Ingress API 和 Gateway API 标准,提供开箱即用的 UI 控制台,WAF 防护插件、IP/Cookie CC 防护插件开箱即用。
|
||||
|
||||
支持对接 Let's Encrypt 自动签发和续签免费证书,并且可以脱离 K8s 部署,一行 Docker 命令即可启动,方便个人开发者使用。
|
||||
|
||||
插件支持热更新,变更插件逻辑和配置都对流量无损。
|
||||
|
||||
## 功能展示
|
||||
|
||||
### AI 网关 Demo 展示
|
||||
|
||||
[从 OpenAI 到其他大模型,30 秒完成迁移
|
||||
](https://www.bilibili.com/video/BV1dT421a7w7/?spm_id_from=333.788.recommend_more_video.14)
|
||||
|
||||
|
||||
### Higress UI 控制台
|
||||
|
||||
- **丰富的可观测**
|
||||
|
||||
|
||||
@@ -301,6 +301,7 @@ type MatchRule struct {
|
||||
Domain []string `protobuf:"bytes,2,rep,name=domain,proto3" json:"domain,omitempty"`
|
||||
Config *types.Struct `protobuf:"bytes,3,opt,name=config,proto3" json:"config,omitempty"`
|
||||
ConfigDisable bool `protobuf:"varint,4,opt,name=config_disable,json=configDisable,proto3" json:"config_disable,omitempty"`
|
||||
Service []string `protobuf:"bytes,5,rep,name=service,proto3" json:"service,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
@@ -367,6 +368,13 @@ func (m *MatchRule) GetConfigDisable() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *MatchRule) GetService() []string {
|
||||
if m != nil {
|
||||
return m.Service
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterEnum("higress.extensions.v1alpha1.PluginPhase", PluginPhase_name, PluginPhase_value)
|
||||
proto.RegisterEnum("higress.extensions.v1alpha1.PullPolicy", PullPolicy_name, PullPolicy_value)
|
||||
@@ -377,46 +385,47 @@ func init() {
|
||||
func init() { proto.RegisterFile("extensions/v1alpha1/wasm.proto", fileDescriptor_4d60b240916c4e18) }
|
||||
|
||||
var fileDescriptor_4d60b240916c4e18 = []byte{
|
||||
// 619 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x94, 0xdd, 0x4e, 0xdb, 0x4c,
|
||||
0x10, 0x86, 0x71, 0x02, 0x81, 0x4c, 0x80, 0xcf, 0xac, 0xbe, 0xd2, 0x15, 0x54, 0x69, 0x84, 0xd4,
|
||||
0xd6, 0xe5, 0xc0, 0x16, 0xa1, 0x3f, 0x27, 0x15, 0x6a, 0x80, 0xb4, 0x44, 0x6d, 0x53, 0xcb, 0x86,
|
||||
0x56, 0xe5, 0xc4, 0xda, 0x98, 0x8d, 0xb3, 0xea, 0xfa, 0x47, 0xde, 0x35, 0x34, 0x17, 0xd2, 0x7b,
|
||||
0xea, 0x61, 0x2f, 0xa1, 0xe2, 0x2e, 0x7a, 0x56, 0x65, 0x6d, 0x43, 0x42, 0xab, 0x9c, 0xed, 0xce,
|
||||
0x3c, 0x33, 0xf3, 0xbe, 0xe3, 0x95, 0xa1, 0x49, 0xbf, 0x49, 0x1a, 0x09, 0x16, 0x47, 0xc2, 0xba,
|
||||
0xdc, 0x23, 0x3c, 0x19, 0x91, 0x3d, 0xeb, 0x8a, 0x88, 0xd0, 0x4c, 0xd2, 0x58, 0xc6, 0x68, 0x7b,
|
||||
0xc4, 0x82, 0x94, 0x0a, 0x61, 0xde, 0x72, 0x66, 0xc9, 0x6d, 0x35, 0x83, 0x38, 0x0e, 0x38, 0xb5,
|
||||
0x14, 0x3a, 0xc8, 0x86, 0xd6, 0x55, 0x4a, 0x92, 0x84, 0xa6, 0x22, 0x2f, 0xde, 0x7a, 0x70, 0x37,
|
||||
0x2f, 0x64, 0x9a, 0xf9, 0x32, 0xcf, 0xee, 0xfc, 0x5e, 0x04, 0xf8, 0x4c, 0x44, 0x68, 0xf3, 0x2c,
|
||||
0x60, 0x11, 0xd2, 0xa1, 0x9a, 0xa5, 0x1c, 0x57, 0x5a, 0x9a, 0x51, 0x77, 0x26, 0x47, 0xb4, 0x09,
|
||||
0x35, 0x31, 0x22, 0xed, 0xe7, 0x2f, 0x70, 0x55, 0x05, 0x8b, 0x1b, 0x72, 0x61, 0x83, 0x85, 0x24,
|
||||
0xa0, 0x5e, 0x92, 0x71, 0xee, 0x25, 0x31, 0x67, 0xfe, 0x18, 0x2f, 0xb6, 0x34, 0x63, 0xbd, 0xfd,
|
||||
0xc4, 0x9c, 0xa3, 0xd7, 0xb4, 0x33, 0xce, 0x6d, 0x85, 0x3b, 0xff, 0xa9, 0x0e, 0xb7, 0x01, 0xb4,
|
||||
0x3b, 0xd3, 0x54, 0x50, 0x3f, 0xa5, 0x12, 0x2f, 0xa9, 0xb9, 0xb7, 0xac, 0xab, 0xc2, 0xe8, 0x29,
|
||||
0xe8, 0x97, 0x34, 0x65, 0x43, 0xe6, 0x13, 0xc9, 0xe2, 0xc8, 0xfb, 0x4a, 0xc7, 0xb8, 0x96, 0xa3,
|
||||
0xd3, 0xf1, 0x77, 0x74, 0x8c, 0x5e, 0xc1, 0x5a, 0xa2, 0xfc, 0x79, 0x7e, 0x1c, 0x0d, 0x59, 0x80,
|
||||
0x97, 0x5b, 0x9a, 0xd1, 0x68, 0xdf, 0x37, 0xf3, 0xd5, 0x98, 0xe5, 0x6a, 0x4c, 0x57, 0xad, 0xc6,
|
||||
0x59, 0xcd, 0xe9, 0x23, 0x05, 0xa3, 0x87, 0xd0, 0x28, 0xaa, 0x23, 0x12, 0x52, 0xbc, 0xa2, 0x66,
|
||||
0x40, 0x1e, 0xea, 0x93, 0x90, 0xa2, 0x03, 0x58, 0x4a, 0x46, 0x44, 0x50, 0x5c, 0x57, 0xf6, 0x8d,
|
||||
0xf9, 0xf6, 0x55, 0x9d, 0x3d, 0xe1, 0x9d, 0xbc, 0x0c, 0xbd, 0x84, 0x95, 0x24, 0x65, 0x71, 0xca,
|
||||
0xe4, 0x18, 0x83, 0x52, 0xb6, 0xfd, 0x97, 0xb2, 0x5e, 0x24, 0xf7, 0xdb, 0x9f, 0x08, 0xcf, 0xa8,
|
||||
0x73, 0x03, 0xa3, 0x03, 0x58, 0xbf, 0xa0, 0x43, 0x92, 0x71, 0x59, 0x1a, 0xa3, 0xf3, 0x8d, 0xad,
|
||||
0x15, 0x78, 0xe1, 0xec, 0x2d, 0x34, 0x42, 0x22, 0xfd, 0x91, 0x97, 0x66, 0x9c, 0x0a, 0x3c, 0x6c,
|
||||
0x55, 0x8d, 0x46, 0xfb, 0xf1, 0x5c, 0xf9, 0x1f, 0x26, 0xbc, 0x93, 0x71, 0xea, 0x40, 0x58, 0x1e,
|
||||
0x05, 0x7a, 0x06, 0x9b, 0xb3, 0x42, 0xbc, 0x0b, 0x26, 0xc8, 0x80, 0x53, 0x1c, 0xb4, 0x34, 0x63,
|
||||
0xc5, 0xf9, 0x7f, 0x66, 0xee, 0x71, 0x9e, 0xdb, 0xf9, 0xae, 0x41, 0xfd, 0xa6, 0x1f, 0xc2, 0xb0,
|
||||
0xcc, 0x22, 0x35, 0x18, 0x6b, 0xad, 0xaa, 0x51, 0x77, 0xca, 0xeb, 0xe4, 0x09, 0x5e, 0xc4, 0x21,
|
||||
0x61, 0x11, 0xae, 0xa8, 0x44, 0x71, 0x43, 0x16, 0xd4, 0x0a, 0xdb, 0xd5, 0xf9, 0xb6, 0x0b, 0x0c,
|
||||
0x3d, 0x82, 0xf5, 0x3b, 0xf2, 0x16, 0x95, 0xbc, 0x35, 0x7f, 0x5a, 0xd7, 0x6e, 0x17, 0x1a, 0x53,
|
||||
0x5f, 0x09, 0xdd, 0x83, 0x8d, 0xb3, 0xbe, 0x6b, 0x77, 0x8f, 0x7a, 0x6f, 0x7a, 0xdd, 0x63, 0xcf,
|
||||
0x3e, 0xe9, 0xb8, 0x5d, 0x7d, 0x01, 0xd5, 0x61, 0xa9, 0x73, 0x76, 0x7a, 0xd2, 0xd7, 0xb5, 0xf2,
|
||||
0x78, 0xae, 0x57, 0x26, 0x47, 0xf7, 0xb4, 0x73, 0xea, 0xea, 0xd5, 0xdd, 0x43, 0x80, 0xa9, 0xa7,
|
||||
0xbd, 0x09, 0x68, 0xa6, 0xcb, 0xc7, 0xf7, 0xbd, 0xa3, 0x2f, 0xfa, 0x02, 0xd2, 0x61, 0xb5, 0x37,
|
||||
0xec, 0xc7, 0xd2, 0x4e, 0xa9, 0xa0, 0x91, 0xd4, 0x35, 0x04, 0x50, 0xeb, 0xf0, 0x2b, 0x32, 0x16,
|
||||
0x7a, 0xe5, 0xf0, 0xf5, 0x8f, 0xeb, 0xa6, 0xf6, 0xf3, 0xba, 0xa9, 0xfd, 0xba, 0x6e, 0x6a, 0xe7,
|
||||
0xed, 0x80, 0xc9, 0x51, 0x36, 0x30, 0xfd, 0x38, 0xb4, 0x08, 0x67, 0x03, 0x32, 0x20, 0x56, 0xf1,
|
||||
0xb1, 0x2c, 0x92, 0x30, 0xeb, 0x1f, 0xbf, 0x91, 0x41, 0x4d, 0x2d, 0x63, 0xff, 0x4f, 0x00, 0x00,
|
||||
0x00, 0xff, 0xff, 0xb9, 0xf2, 0x67, 0xbe, 0x64, 0x04, 0x00, 0x00,
|
||||
// 631 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x94, 0xdd, 0x6e, 0xd3, 0x4c,
|
||||
0x10, 0x86, 0xeb, 0xa4, 0x49, 0x9b, 0x49, 0xdb, 0xcf, 0x5d, 0x7d, 0x94, 0x55, 0x8b, 0x42, 0x54,
|
||||
0x09, 0x30, 0x3d, 0xb0, 0xd5, 0x94, 0x9f, 0x13, 0x54, 0x91, 0xb6, 0x81, 0x46, 0x40, 0xb0, 0xec,
|
||||
0x16, 0x44, 0x4f, 0xac, 0x8d, 0xbb, 0x71, 0x56, 0xac, 0x7f, 0xe4, 0x5d, 0xb7, 0xe4, 0xaa, 0xb8,
|
||||
0x0d, 0x0e, 0xb9, 0x04, 0xd4, 0xbb, 0xe0, 0x0c, 0x65, 0xed, 0x34, 0x49, 0x41, 0x39, 0xdb, 0x9d,
|
||||
0x79, 0x66, 0xe6, 0x7d, 0xc7, 0x2b, 0x43, 0x83, 0x7e, 0x93, 0x34, 0x12, 0x2c, 0x8e, 0x84, 0x75,
|
||||
0xb5, 0x4f, 0x78, 0x32, 0x24, 0xfb, 0xd6, 0x35, 0x11, 0xa1, 0x99, 0xa4, 0xb1, 0x8c, 0xd1, 0xce,
|
||||
0x90, 0x05, 0x29, 0x15, 0xc2, 0x9c, 0x72, 0xe6, 0x84, 0xdb, 0x6e, 0x04, 0x71, 0x1c, 0x70, 0x6a,
|
||||
0x29, 0xb4, 0x9f, 0x0d, 0xac, 0xeb, 0x94, 0x24, 0x09, 0x4d, 0x45, 0x5e, 0xbc, 0xfd, 0xe0, 0x6e,
|
||||
0x5e, 0xc8, 0x34, 0xf3, 0x65, 0x9e, 0xdd, 0xfd, 0xbd, 0x0c, 0xf0, 0x99, 0x88, 0xd0, 0xe6, 0x59,
|
||||
0xc0, 0x22, 0xa4, 0x43, 0x39, 0x4b, 0x39, 0x2e, 0x35, 0x35, 0xa3, 0xe6, 0x8c, 0x8f, 0x68, 0x0b,
|
||||
0xaa, 0x62, 0x48, 0x5a, 0xcf, 0x5f, 0xe0, 0xb2, 0x0a, 0x16, 0x37, 0xe4, 0xc2, 0x26, 0x0b, 0x49,
|
||||
0x40, 0xbd, 0x24, 0xe3, 0xdc, 0x4b, 0x62, 0xce, 0xfc, 0x11, 0x5e, 0x6e, 0x6a, 0xc6, 0x46, 0xeb,
|
||||
0x89, 0xb9, 0x40, 0xaf, 0x69, 0x67, 0x9c, 0xdb, 0x0a, 0x77, 0xfe, 0x53, 0x1d, 0xa6, 0x01, 0xb4,
|
||||
0x37, 0xd7, 0x54, 0x50, 0x3f, 0xa5, 0x12, 0x57, 0xd4, 0xdc, 0x29, 0xeb, 0xaa, 0x30, 0x7a, 0x0a,
|
||||
0xfa, 0x15, 0x4d, 0xd9, 0x80, 0xf9, 0x44, 0xb2, 0x38, 0xf2, 0xbe, 0xd2, 0x11, 0xae, 0xe6, 0xe8,
|
||||
0x6c, 0xfc, 0x1d, 0x1d, 0xa1, 0x57, 0xb0, 0x9e, 0x28, 0x7f, 0x9e, 0x1f, 0x47, 0x03, 0x16, 0xe0,
|
||||
0x95, 0xa6, 0x66, 0xd4, 0x5b, 0xf7, 0xcd, 0x7c, 0x35, 0xe6, 0x64, 0x35, 0xa6, 0xab, 0x56, 0xe3,
|
||||
0xac, 0xe5, 0xf4, 0xb1, 0x82, 0xd1, 0x43, 0xa8, 0x17, 0xd5, 0x11, 0x09, 0x29, 0x5e, 0x55, 0x33,
|
||||
0x20, 0x0f, 0xf5, 0x48, 0x48, 0xd1, 0x21, 0x54, 0x92, 0x21, 0x11, 0x14, 0xd7, 0x94, 0x7d, 0x63,
|
||||
0xb1, 0x7d, 0x55, 0x67, 0x8f, 0x79, 0x27, 0x2f, 0x43, 0x2f, 0x61, 0x35, 0x49, 0x59, 0x9c, 0x32,
|
||||
0x39, 0xc2, 0xa0, 0x94, 0xed, 0xfc, 0xa5, 0xac, 0x1b, 0xc9, 0x83, 0xd6, 0x27, 0xc2, 0x33, 0xea,
|
||||
0xdc, 0xc2, 0xe8, 0x10, 0x36, 0x2e, 0xe9, 0x80, 0x64, 0x5c, 0x4e, 0x8c, 0xd1, 0xc5, 0xc6, 0xd6,
|
||||
0x0b, 0xbc, 0x70, 0xf6, 0x16, 0xea, 0x21, 0x91, 0xfe, 0xd0, 0x4b, 0x33, 0x4e, 0x05, 0x1e, 0x34,
|
||||
0xcb, 0x46, 0xbd, 0xf5, 0x78, 0xa1, 0xfc, 0x0f, 0x63, 0xde, 0xc9, 0x38, 0x75, 0x20, 0x9c, 0x1c,
|
||||
0x05, 0x7a, 0x06, 0x5b, 0xf3, 0x42, 0xbc, 0x4b, 0x26, 0x48, 0x9f, 0x53, 0x1c, 0x34, 0x35, 0x63,
|
||||
0xd5, 0xf9, 0x7f, 0x6e, 0xee, 0x49, 0x9e, 0xdb, 0xfd, 0xae, 0x41, 0xed, 0xb6, 0x1f, 0xc2, 0xb0,
|
||||
0xc2, 0x22, 0x35, 0x18, 0x6b, 0xcd, 0xb2, 0x51, 0x73, 0x26, 0xd7, 0xf1, 0x13, 0xbc, 0x8c, 0x43,
|
||||
0xc2, 0x22, 0x5c, 0x52, 0x89, 0xe2, 0x86, 0x2c, 0xa8, 0x16, 0xb6, 0xcb, 0x8b, 0x6d, 0x17, 0x18,
|
||||
0x7a, 0x04, 0x1b, 0x77, 0xe4, 0x2d, 0x2b, 0x79, 0xeb, 0xfe, 0xac, 0xae, 0xb1, 0x12, 0x41, 0xd3,
|
||||
0x2b, 0xe6, 0x53, 0x5c, 0xc9, 0x95, 0x14, 0xd7, 0xbd, 0x0e, 0xd4, 0x67, 0xbe, 0x1f, 0xba, 0x07,
|
||||
0x9b, 0xe7, 0x3d, 0xd7, 0xee, 0x1c, 0x77, 0xdf, 0x74, 0x3b, 0x27, 0x9e, 0x7d, 0xda, 0x76, 0x3b,
|
||||
0xfa, 0x12, 0xaa, 0x41, 0xa5, 0x7d, 0x7e, 0x76, 0xda, 0xd3, 0xb5, 0xc9, 0xf1, 0x42, 0x2f, 0x8d,
|
||||
0x8f, 0xee, 0x59, 0xfb, 0xcc, 0xd5, 0xcb, 0x7b, 0x47, 0x00, 0x33, 0x8f, 0x7e, 0x0b, 0xd0, 0x5c,
|
||||
0x97, 0x8f, 0xef, 0xbb, 0xc7, 0x5f, 0xf4, 0x25, 0xa4, 0xc3, 0x5a, 0x77, 0xd0, 0x8b, 0xa5, 0x9d,
|
||||
0x52, 0x41, 0x23, 0xa9, 0x6b, 0x08, 0xa0, 0xda, 0xe6, 0xd7, 0x64, 0x24, 0xf4, 0xd2, 0xd1, 0xeb,
|
||||
0x1f, 0x37, 0x0d, 0xed, 0xe7, 0x4d, 0x43, 0xfb, 0x75, 0xd3, 0xd0, 0x2e, 0x5a, 0x01, 0x93, 0xc3,
|
||||
0xac, 0x6f, 0xfa, 0x71, 0x68, 0x11, 0xce, 0xfa, 0xa4, 0x4f, 0xac, 0xe2, 0x33, 0x5a, 0x24, 0x61,
|
||||
0xd6, 0x3f, 0x7e, 0x30, 0xfd, 0xaa, 0x5a, 0xd3, 0xc1, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0b,
|
||||
0x3c, 0xc3, 0xcf, 0x7e, 0x04, 0x00, 0x00,
|
||||
}
|
||||
|
||||
func (m *WasmPlugin) Marshal() (dAtA []byte, err error) {
|
||||
@@ -581,6 +590,15 @@ func (m *MatchRule) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
if len(m.Service) > 0 {
|
||||
for iNdEx := len(m.Service) - 1; iNdEx >= 0; iNdEx-- {
|
||||
i -= len(m.Service[iNdEx])
|
||||
copy(dAtA[i:], m.Service[iNdEx])
|
||||
i = encodeVarintWasm(dAtA, i, uint64(len(m.Service[iNdEx])))
|
||||
i--
|
||||
dAtA[i] = 0x2a
|
||||
}
|
||||
}
|
||||
if m.ConfigDisable {
|
||||
i--
|
||||
if m.ConfigDisable {
|
||||
@@ -719,6 +737,12 @@ func (m *MatchRule) Size() (n int) {
|
||||
if m.ConfigDisable {
|
||||
n += 2
|
||||
}
|
||||
if len(m.Service) > 0 {
|
||||
for _, s := range m.Service {
|
||||
l = len(s)
|
||||
n += 1 + l + sovWasm(uint64(l))
|
||||
}
|
||||
}
|
||||
if m.XXX_unrecognized != nil {
|
||||
n += len(m.XXX_unrecognized)
|
||||
}
|
||||
@@ -1291,6 +1315,38 @@ func (m *MatchRule) Unmarshal(dAtA []byte) error {
|
||||
}
|
||||
}
|
||||
m.ConfigDisable = bool(v != 0)
|
||||
case 5:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field Service", wireType)
|
||||
}
|
||||
var stringLen uint64
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowWasm
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
stringLen |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
intStringLen := int(stringLen)
|
||||
if intStringLen < 0 {
|
||||
return ErrInvalidLengthWasm
|
||||
}
|
||||
postIndex := iNdEx + intStringLen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthWasm
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.Service = append(m.Service, string(dAtA[iNdEx:postIndex]))
|
||||
iNdEx = postIndex
|
||||
default:
|
||||
iNdEx = preIndex
|
||||
skippy, err := skipWasm(dAtA[iNdEx:])
|
||||
|
||||
@@ -114,6 +114,7 @@ message MatchRule {
|
||||
repeated string domain = 2;
|
||||
google.protobuf.Struct config = 3;
|
||||
bool config_disable = 4;
|
||||
repeated string service = 5;
|
||||
}
|
||||
|
||||
// The phase in the filter chain where the plugin will be injected.
|
||||
|
||||
@@ -64,6 +64,10 @@ spec:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
service:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
type: object
|
||||
type: array
|
||||
phase:
|
||||
|
||||
@@ -22,6 +22,7 @@ import (
|
||||
|
||||
"github.com/alibaba/higress/pkg/cmd/hgctl/kubernetes"
|
||||
"github.com/alibaba/higress/pkg/cmd/options"
|
||||
"istio.io/istio/istioctl/pkg/writer/envoy/configdump"
|
||||
"k8s.io/apimachinery/pkg/types"
|
||||
"sigs.k8s.io/yaml"
|
||||
)
|
||||
@@ -61,6 +62,23 @@ func NewDefaultGetEnvoyConfigOptions() *GetEnvoyConfigOptions {
|
||||
}
|
||||
}
|
||||
|
||||
func setupConfigdumpEnvoyConfigWriter(debug []byte, stdout io.Writer) (*configdump.ConfigWriter, error) {
|
||||
cw := &configdump.ConfigWriter{Stdout: stdout}
|
||||
err := cw.Prime(debug)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return cw, nil
|
||||
}
|
||||
|
||||
func GetEnvoyConfigWriter(config *GetEnvoyConfigOptions, stdout io.Writer) (*configdump.ConfigWriter, error) {
|
||||
configDump, err := retrieveConfigDump(config.PodName, config.PodNamespace, config.BindAddress, config.IncludeEds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return setupConfigdumpEnvoyConfigWriter(configDump, stdout)
|
||||
}
|
||||
|
||||
func GetEnvoyConfig(config *GetEnvoyConfigOptions) ([]byte, error) {
|
||||
configDump, err := retrieveConfigDump(config.PodName, config.PodNamespace, config.BindAddress, config.IncludeEds)
|
||||
if err != nil {
|
||||
@@ -144,14 +162,12 @@ func formatGatewayConfig(configDump any, output string) ([]byte, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if output == "yaml" {
|
||||
out, err = yaml.JSONToYAML(out)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
|
||||
259
envoy/1.20/patches/envoy/20240610-optimize-xds.patch
Normal file
259
envoy/1.20/patches/envoy/20240610-optimize-xds.patch
Normal file
@@ -0,0 +1,259 @@
|
||||
diff --git a/source/common/router/BUILD b/source/common/router/BUILD
|
||||
index 5c58501..4db76cd 100644
|
||||
--- a/source/common/router/BUILD
|
||||
+++ b/source/common/router/BUILD
|
||||
@@ -212,6 +212,7 @@ envoy_cc_library(
|
||||
"//envoy/router:rds_interface",
|
||||
"//envoy/router:scopes_interface",
|
||||
"//envoy/thread_local:thread_local_interface",
|
||||
+ "//source/common/protobuf:utility_lib",
|
||||
"@envoy_api//envoy/config/route/v3:pkg_cc_proto",
|
||||
"@envoy_api//envoy/extensions/filters/network/http_connection_manager/v3:pkg_cc_proto",
|
||||
],
|
||||
diff --git a/source/common/router/config_impl.cc b/source/common/router/config_impl.cc
|
||||
index ff7b4c8..5ac4523 100644
|
||||
--- a/source/common/router/config_impl.cc
|
||||
+++ b/source/common/router/config_impl.cc
|
||||
@@ -550,19 +550,11 @@ RouteEntryImplBase::RouteEntryImplBase(const VirtualHostImpl& vhost,
|
||||
"not be stripped: {}",
|
||||
path_redirect_);
|
||||
}
|
||||
- ENVOY_LOG(info, "route stats is {}, name is {}", route.stat_prefix(), route.name());
|
||||
if (!route.stat_prefix().empty()) {
|
||||
route_stats_context_ = std::make_unique<RouteStatsContext>(
|
||||
factory_context.scope(), factory_context.routerContext().routeStatNames(), vhost.statName(),
|
||||
route.stat_prefix());
|
||||
- } else if (!route.name().empty()) {
|
||||
- // Added by Ingress
|
||||
- // use route_name as default stat_prefix
|
||||
- route_stats_context_ = std::make_unique<RouteStatsContext>(
|
||||
- factory_context.scope(), factory_context.routerContext().routeStatNames(), vhost.statName(),
|
||||
- route.name());
|
||||
}
|
||||
- // End Added
|
||||
}
|
||||
|
||||
bool RouteEntryImplBase::evaluateRuntimeMatch(const uint64_t random_value) const {
|
||||
@@ -1415,9 +1407,7 @@ VirtualHostImpl::VirtualHostImpl(
|
||||
retry_shadow_buffer_limit_(PROTOBUF_GET_WRAPPED_OR_DEFAULT(
|
||||
virtual_host, per_request_buffer_limit_bytes, std::numeric_limits<uint32_t>::max())),
|
||||
include_attempt_count_in_request_(virtual_host.include_request_attempt_count()),
|
||||
- include_attempt_count_in_response_(virtual_host.include_attempt_count_in_response()),
|
||||
- virtual_cluster_catch_all_(*vcluster_scope_,
|
||||
- factory_context.routerContext().virtualClusterStatNames()) {
|
||||
+ include_attempt_count_in_response_(virtual_host.include_attempt_count_in_response()) {
|
||||
switch (virtual_host.require_tls()) {
|
||||
case envoy::config::route::v3::VirtualHost::NONE:
|
||||
ssl_requirements_ = SslRequirements::None;
|
||||
@@ -1478,10 +1468,14 @@ VirtualHostImpl::VirtualHostImpl(
|
||||
}
|
||||
}
|
||||
|
||||
- for (const auto& virtual_cluster : virtual_host.virtual_clusters()) {
|
||||
- virtual_clusters_.push_back(
|
||||
- VirtualClusterEntry(virtual_cluster, *vcluster_scope_,
|
||||
- factory_context.routerContext().virtualClusterStatNames()));
|
||||
+ if (!virtual_host.virtual_clusters().empty()) {
|
||||
+ virtual_cluster_catch_all_ = std::make_unique<CatchAllVirtualCluster>(
|
||||
+ *vcluster_scope_, factory_context.routerContext().virtualClusterStatNames());
|
||||
+ for (const auto& virtual_cluster : virtual_host.virtual_clusters()) {
|
||||
+ virtual_clusters_.push_back(
|
||||
+ VirtualClusterEntry(virtual_cluster, *vcluster_scope_,
|
||||
+ factory_context.routerContext().virtualClusterStatNames()));
|
||||
+ }
|
||||
}
|
||||
|
||||
if (virtual_host.has_cors()) {
|
||||
@@ -1774,7 +1768,7 @@ VirtualHostImpl::virtualClusterFromEntries(const Http::HeaderMap& headers) const
|
||||
}
|
||||
|
||||
if (!virtual_clusters_.empty()) {
|
||||
- return &virtual_cluster_catch_all_;
|
||||
+ return virtual_cluster_catch_all_.get();
|
||||
}
|
||||
|
||||
return nullptr;
|
||||
diff --git a/source/common/router/config_impl.h b/source/common/router/config_impl.h
|
||||
index cf0ddf3..d83eb94 100644
|
||||
--- a/source/common/router/config_impl.h
|
||||
+++ b/source/common/router/config_impl.h
|
||||
@@ -352,10 +352,10 @@ private:
|
||||
const bool include_attempt_count_in_response_;
|
||||
absl::optional<envoy::config::route::v3::RetryPolicy> retry_policy_;
|
||||
absl::optional<envoy::config::route::v3::HedgePolicy> hedge_policy_;
|
||||
- const CatchAllVirtualCluster virtual_cluster_catch_all_;
|
||||
#if defined(ALIMESH)
|
||||
std::vector<std::string> allow_server_names_;
|
||||
#endif
|
||||
+ std::unique_ptr<const CatchAllVirtualCluster> virtual_cluster_catch_all_;
|
||||
};
|
||||
|
||||
using VirtualHostSharedPtr = std::shared_ptr<VirtualHostImpl>;
|
||||
diff --git a/source/common/router/scoped_config_impl.cc b/source/common/router/scoped_config_impl.cc
|
||||
index 594d571..6482615 100644
|
||||
--- a/source/common/router/scoped_config_impl.cc
|
||||
+++ b/source/common/router/scoped_config_impl.cc
|
||||
@@ -7,6 +7,8 @@
|
||||
#include "source/common/http/header_utility.h"
|
||||
#endif
|
||||
|
||||
+#include "source/common/protobuf/utility.h"
|
||||
+
|
||||
namespace Envoy {
|
||||
namespace Router {
|
||||
|
||||
@@ -239,7 +241,8 @@ HeaderValueExtractorImpl::computeFragment(const Http::HeaderMap& headers) const
|
||||
|
||||
ScopedRouteInfo::ScopedRouteInfo(envoy::config::route::v3::ScopedRouteConfiguration&& config_proto,
|
||||
ConfigConstSharedPtr&& route_config)
|
||||
- : config_proto_(std::move(config_proto)), route_config_(std::move(route_config)) {
|
||||
+ : config_proto_(std::move(config_proto)), route_config_(std::move(route_config)),
|
||||
+ config_hash_(MessageUtil::hash(config_proto)) {
|
||||
// TODO(stevenzzzz): Maybe worth a KeyBuilder abstraction when there are more than one type of
|
||||
// Fragment.
|
||||
for (const auto& fragment : config_proto_.key().fragments()) {
|
||||
diff --git a/source/common/router/scoped_config_impl.h b/source/common/router/scoped_config_impl.h
|
||||
index 9f6a1b2..28e2ee5 100644
|
||||
--- a/source/common/router/scoped_config_impl.h
|
||||
+++ b/source/common/router/scoped_config_impl.h
|
||||
@@ -154,11 +154,13 @@ public:
|
||||
return config_proto_;
|
||||
}
|
||||
const std::string& scopeName() const { return config_proto_.name(); }
|
||||
+ uint64_t configHash() const { return config_hash_; }
|
||||
|
||||
private:
|
||||
envoy::config::route::v3::ScopedRouteConfiguration config_proto_;
|
||||
ScopeKey scope_key_;
|
||||
ConfigConstSharedPtr route_config_;
|
||||
+ const uint64_t config_hash_;
|
||||
};
|
||||
using ScopedRouteInfoConstSharedPtr = std::shared_ptr<const ScopedRouteInfo>;
|
||||
// Ordered map for consistent config dumping.
|
||||
diff --git a/source/common/router/scoped_rds.cc b/source/common/router/scoped_rds.cc
|
||||
index 133e91e..9b2096e 100644
|
||||
--- a/source/common/router/scoped_rds.cc
|
||||
+++ b/source/common/router/scoped_rds.cc
|
||||
@@ -245,6 +245,11 @@ bool ScopedRdsConfigSubscription::addOrUpdateScopes(
|
||||
dynamic_cast<const envoy::config::route::v3::ScopedRouteConfiguration&>(
|
||||
resource.get().resource());
|
||||
const std::string scope_name = scoped_route_config.name();
|
||||
+ if (const auto& scope_info_iter = scoped_route_map_.find(scope_name);
|
||||
+ scope_info_iter != scoped_route_map_.end() &&
|
||||
+ scope_info_iter->second->configHash() == MessageUtil::hash(scoped_route_config)) {
|
||||
+ continue;
|
||||
+ }
|
||||
rds.set_route_config_name(scoped_route_config.route_configuration_name());
|
||||
std::unique_ptr<RdsRouteConfigProviderHelper> rds_config_provider_helper;
|
||||
std::shared_ptr<ScopedRouteInfo> scoped_route_info = nullptr;
|
||||
@@ -398,6 +403,7 @@ void ScopedRdsConfigSubscription::onRdsConfigUpdate(const std::string& scope_nam
|
||||
auto new_scoped_route_info = std::make_shared<ScopedRouteInfo>(
|
||||
envoy::config::route::v3::ScopedRouteConfiguration(iter->second->configProto()),
|
||||
std::move(new_rds_config));
|
||||
+ scoped_route_map_[new_scoped_route_info->scopeName()] = new_scoped_route_info;
|
||||
applyConfigUpdate([new_scoped_route_info](ConfigProvider::ConfigConstSharedPtr config)
|
||||
-> ConfigProvider::ConfigConstSharedPtr {
|
||||
auto* thread_local_scoped_config =
|
||||
diff --git a/source/common/router/scoped_rds.h b/source/common/router/scoped_rds.h
|
||||
index d21d812..a510c1f 100644
|
||||
--- a/source/common/router/scoped_rds.h
|
||||
+++ b/source/common/router/scoped_rds.h
|
||||
@@ -104,7 +104,7 @@ struct ScopedRdsStats {
|
||||
// A scoped RDS subscription to be used with the dynamic scoped RDS ConfigProvider.
|
||||
class ScopedRdsConfigSubscription
|
||||
: public Envoy::Config::DeltaConfigSubscriptionInstance,
|
||||
- Envoy::Config::SubscriptionBase<envoy::config::route::v3::ScopedRouteConfiguration> {
|
||||
+ public Envoy::Config::SubscriptionBase<envoy::config::route::v3::ScopedRouteConfiguration> {
|
||||
public:
|
||||
using ScopedRouteConfigurationMap =
|
||||
std::map<std::string, envoy::config::route::v3::ScopedRouteConfiguration>;
|
||||
diff --git a/test/common/router/scoped_config_impl_test.cc b/test/common/router/scoped_config_impl_test.cc
|
||||
index f63f258..69a2f4b 100644
|
||||
--- a/test/common/router/scoped_config_impl_test.cc
|
||||
+++ b/test/common/router/scoped_config_impl_test.cc
|
||||
@@ -452,6 +452,24 @@ TEST_F(ScopedRouteInfoTest, Creation) {
|
||||
EXPECT_EQ(info_->scopeKey(), makeKey({"foo", "bar"}));
|
||||
}
|
||||
|
||||
+// Tests that config hash changes if ScopedRouteConfiguration of the ScopedRouteInfo changes.
|
||||
+TEST_F(ScopedRouteInfoTest, Hash) {
|
||||
+ const envoy::config::route::v3::ScopedRouteConfiguration config_copy = scoped_route_config_;
|
||||
+ info_ = std::make_unique<ScopedRouteInfo>(scoped_route_config_, route_config_);
|
||||
+ EXPECT_EQ(info_->routeConfig().get(), route_config_.get());
|
||||
+ EXPECT_TRUE(TestUtility::protoEqual(info_->configProto(), config_copy));
|
||||
+ EXPECT_EQ(info_->scopeName(), "foo_scope");
|
||||
+ EXPECT_EQ(info_->scopeKey(), makeKey({"foo", "bar"}));
|
||||
+
|
||||
+ const auto info2 = std::make_unique<ScopedRouteInfo>(scoped_route_config_, route_config_);
|
||||
+ ASSERT_EQ(info2->configHash(), info_->configHash());
|
||||
+
|
||||
+ // Mutate the config and hash should be different now.
|
||||
+ scoped_route_config_.set_on_demand(true);
|
||||
+ const auto info3 = std::make_unique<ScopedRouteInfo>(scoped_route_config_, route_config_);
|
||||
+ ASSERT_NE(info3->configHash(), info_->configHash());
|
||||
+}
|
||||
+
|
||||
class ScopedConfigImplTest : public testing::Test {
|
||||
public:
|
||||
void SetUp() override {
|
||||
diff --git a/test/common/router/scoped_rds_test.cc b/test/common/router/scoped_rds_test.cc
|
||||
index 09b96a6..b4776c9 100644
|
||||
--- a/test/common/router/scoped_rds_test.cc
|
||||
+++ b/test/common/router/scoped_rds_test.cc
|
||||
@@ -13,6 +13,7 @@
|
||||
#include "envoy/stats/scope.h"
|
||||
|
||||
#include "source/common/config/api_version.h"
|
||||
+#include "source/common/config/config_provider_impl.h"
|
||||
#include "source/common/config/grpc_mux_impl.h"
|
||||
#include "source/common/protobuf/message_validator_impl.h"
|
||||
#include "source/common/router/scoped_rds.h"
|
||||
@@ -365,6 +366,48 @@ key:
|
||||
"Didn't find a registered implementation for name: 'filter.unknown'");
|
||||
}
|
||||
|
||||
+// Test that scopes with same config as existing scopes will be skipped in a config push.
|
||||
+TEST_F(ScopedRdsTest, UnchangedScopesAreSkipped) {
|
||||
+ setup();
|
||||
+ init_watcher_.expectReady();
|
||||
+ const std::string config_yaml = R"EOF(
|
||||
+name: foo_scope
|
||||
+route_configuration_name: foo_routes
|
||||
+key:
|
||||
+ fragments:
|
||||
+ - string_key: x-foo-key
|
||||
+)EOF";
|
||||
+ const auto resource = parseScopedRouteConfigurationFromYaml(config_yaml);
|
||||
+ const std::string config_yaml2 = R"EOF(
|
||||
+name: foo_scope2
|
||||
+route_configuration_name: foo_routes
|
||||
+key:
|
||||
+ fragments:
|
||||
+ - string_key: x-bar-key
|
||||
+)EOF";
|
||||
+ const auto resource_2 = parseScopedRouteConfigurationFromYaml(config_yaml2);
|
||||
+
|
||||
+ // Delta API.
|
||||
+ const auto decoded_resources = TestUtility::decodeResources({resource, resource_2});
|
||||
+ context_init_manager_.initialize(init_watcher_);
|
||||
+ EXPECT_NO_THROW(srds_subscription_->onConfigUpdate(decoded_resources.refvec_, {}, "v1"));
|
||||
+ EXPECT_EQ(1UL,
|
||||
+ server_factory_context_.scope_.counter("foo.scoped_rds.foo_scoped_routes.config_reload")
|
||||
+ .value());
|
||||
+ EXPECT_EQ(2UL, all_scopes_.value());
|
||||
+ pushRdsConfig({"foo_routes"}, "111");
|
||||
+ Envoy::Router::ScopedRdsConfigSubscription* srds_delta_subscription =
|
||||
+ static_cast<Envoy::Router::ScopedRdsConfigSubscription*>(srds_subscription_);
|
||||
+ ASSERT_NE(srds_delta_subscription, nullptr);
|
||||
+ ASSERT_EQ("v1", srds_delta_subscription->configInfo()->last_config_version_);
|
||||
+ // Push again the same set of config with different version number, the config will be skipped.
|
||||
+ EXPECT_NO_THROW(srds_subscription_->onConfigUpdate(decoded_resources.refvec_, {}, "123"));
|
||||
+ ASSERT_EQ("v1", srds_delta_subscription->configInfo()->last_config_version_);
|
||||
+ EXPECT_EQ(2UL,
|
||||
+ server_factory_context_.scope_.counter("foo.scoped_rds.foo_scoped_routes.config_reload")
|
||||
+ .value());
|
||||
+}
|
||||
+
|
||||
// Test ignoring the optional unknown factory in the per-virtualhost typed config.
|
||||
TEST_F(ScopedRdsTest, OptionalUnknownFactoryForPerVirtualHostTypedConfig) {
|
||||
OptionalHttpFilters optional_http_filters;
|
||||
@@ -0,0 +1,13 @@
|
||||
diff --git a/source/common/http/headers.h b/source/common/http/headers.h
|
||||
index a7a8a3393e..6af4a2852d 100644
|
||||
--- a/source/common/http/headers.h
|
||||
+++ b/source/common/http/headers.h
|
||||
@@ -123,7 +123,7 @@ public:
|
||||
const LowerCaseString TriCostTime{"req-cost-time"};
|
||||
const LowerCaseString TriStartTime{"req-start-time"};
|
||||
const LowerCaseString TriRespStartTime{"resp-start-time"};
|
||||
- const LowerCaseString EnvoyOriginalHost{"original-host"};
|
||||
+ const LowerCaseString EnvoyOriginalHost{"x-envoy-original-host"};
|
||||
const LowerCaseString HigressOriginalService{"x-higress-original-service"};
|
||||
} AliExtendedValues;
|
||||
#endif
|
||||
43
envoy/1.20/patches/envoy/20240725-set-buffer-limit.patch
Normal file
43
envoy/1.20/patches/envoy/20240725-set-buffer-limit.patch
Normal file
@@ -0,0 +1,43 @@
|
||||
diff --git a/source/extensions/common/wasm/context.cc b/source/extensions/common/wasm/context.cc
|
||||
index 9642d8abd3..410baa856f 100644
|
||||
--- a/source/extensions/common/wasm/context.cc
|
||||
+++ b/source/extensions/common/wasm/context.cc
|
||||
@@ -62,6 +62,21 @@ constexpr absl::string_view CelStateKeyPrefix = "wasm.";
|
||||
#if defined(ALIMESH)
|
||||
constexpr std::string_view ClearRouteCacheKey = "clear_route_cache";
|
||||
constexpr std::string_view DisableClearRouteCache = "off";
|
||||
+constexpr std::string_view SetDecoderBufferLimit = "set_decoder_buffer_limit";
|
||||
+constexpr std::string_view SetEncoderBufferLimit = "set_encoder_buffer_limit";
|
||||
+
|
||||
+bool stringViewToUint32(std::string_view str, uint32_t& out_value) {
|
||||
+ try {
|
||||
+ unsigned long temp = std::stoul(std::string(str));
|
||||
+ if (temp <= std::numeric_limits<uint32_t>::max()) {
|
||||
+ out_value = static_cast<uint32_t>(temp);
|
||||
+ return true;
|
||||
+ }
|
||||
+ } catch (const std::exception& e) {
|
||||
+ ENVOY_LOG_MISC(critical, "stringToUint exception '{}'", e.what());
|
||||
+ }
|
||||
+ return false;
|
||||
+}
|
||||
#endif
|
||||
|
||||
using HashPolicy = envoy::config::route::v3::RouteAction::HashPolicy;
|
||||
@@ -1280,6 +1295,16 @@ WasmResult Context::setProperty(std::string_view path, std::string_view value) {
|
||||
} else {
|
||||
disable_clear_route_cache_ = false;
|
||||
}
|
||||
+ } else if (path == SetDecoderBufferLimit && decoder_callbacks_) {
|
||||
+ uint32_t buffer_limit;
|
||||
+ if (stringViewToUint32(value, buffer_limit)) {
|
||||
+ decoder_callbacks_->setDecoderBufferLimit(buffer_limit);
|
||||
+ }
|
||||
+ } else if (path == SetEncoderBufferLimit && encoder_callbacks_) {
|
||||
+ uint32_t buffer_limit;
|
||||
+ if (stringViewToUint32(value, buffer_limit)) {
|
||||
+ encoder_callbacks_->setEncoderBufferLimit(buffer_limit);
|
||||
+ }
|
||||
}
|
||||
#endif
|
||||
if (!state->setValue(toAbslStringView(value))) {
|
||||
106
envoy/1.20/patches/envoy/20240726-custom-span-tag.patch
Normal file
106
envoy/1.20/patches/envoy/20240726-custom-span-tag.patch
Normal file
@@ -0,0 +1,106 @@
|
||||
diff --git a/envoy/stream_info/stream_info.h b/envoy/stream_info/stream_info.h
|
||||
index c6d82db4f4..09717673b0 100644
|
||||
--- a/envoy/stream_info/stream_info.h
|
||||
+++ b/envoy/stream_info/stream_info.h
|
||||
@@ -613,7 +613,21 @@ public:
|
||||
* @return the number of times the request was attempted upstream, absl::nullopt if the request
|
||||
* was never attempted upstream.
|
||||
*/
|
||||
+
|
||||
virtual absl::optional<uint32_t> attemptCount() const PURE;
|
||||
+
|
||||
+#ifdef ALIMESH
|
||||
+ /**
|
||||
+ * @param key the filter state key set by wasm filter.
|
||||
+ * @param value the filter state value set by wasm filter.
|
||||
+ */
|
||||
+ virtual void setCustomSpanTag(const std::string& key, const std::string& value) PURE;
|
||||
+
|
||||
+ /**
|
||||
+ * @return the key-value map of filter states set by wasm filter.
|
||||
+ */
|
||||
+ virtual const std::unordered_map<std::string, std::string>& getCustomSpanTagMap() const PURE;
|
||||
+#endif
|
||||
};
|
||||
|
||||
} // namespace StreamInfo
|
||||
diff --git a/source/common/stream_info/stream_info_impl.h b/source/common/stream_info/stream_info_impl.h
|
||||
index 6ce2afe773..d5e7a80b37 100644
|
||||
--- a/source/common/stream_info/stream_info_impl.h
|
||||
+++ b/source/common/stream_info/stream_info_impl.h
|
||||
@@ -291,6 +291,20 @@ struct StreamInfoImpl : public StreamInfo {
|
||||
|
||||
absl::optional<uint32_t> attemptCount() const override { return attempt_count_; }
|
||||
|
||||
+#ifdef ALIMESH
|
||||
+ void setCustomSpanTag(const std::string& key, const std::string& value) override {
|
||||
+ auto it = custom_span_tags_.find(key);
|
||||
+ if (it != custom_span_tags_.end()) {
|
||||
+ it->second = value;
|
||||
+ } else {
|
||||
+ custom_span_tags_.emplace(key, value);
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ const std::unordered_map<std::string, std::string>& getCustomSpanTagMap() const override {
|
||||
+ return custom_span_tags_;
|
||||
+ }
|
||||
+#endif
|
||||
TimeSource& time_source_;
|
||||
const SystemTime start_time_;
|
||||
const MonotonicTime start_time_monotonic_;
|
||||
@@ -350,6 +364,9 @@ private:
|
||||
absl::optional<Upstream::ClusterInfoConstSharedPtr> upstream_cluster_info_;
|
||||
std::string filter_chain_name_;
|
||||
Tracing::Reason trace_reason_;
|
||||
+#ifdef ALIMESH
|
||||
+ std::unordered_map<std::string, std::string> custom_span_tags_;
|
||||
+#endif
|
||||
};
|
||||
|
||||
} // namespace StreamInfo
|
||||
diff --git a/source/common/tracing/http_tracer_impl.cc b/source/common/tracing/http_tracer_impl.cc
|
||||
index e55cf00e0a..f94e9101d7 100644
|
||||
--- a/source/common/tracing/http_tracer_impl.cc
|
||||
+++ b/source/common/tracing/http_tracer_impl.cc
|
||||
@@ -214,6 +214,14 @@ void HttpTracerUtility::setCommonTags(Span& span, const Http::ResponseHeaderMap*
|
||||
|
||||
span.setTag(Tracing::Tags::get().Component, Tracing::Tags::get().Proxy);
|
||||
|
||||
+#ifdef ALIMESH
|
||||
+ // Wasm filter state
|
||||
+ const auto& custom_span_tags = stream_info.getCustomSpanTagMap();
|
||||
+ for (const auto& it : custom_span_tags) {
|
||||
+ span.setTag(it.first, it.second);
|
||||
+ }
|
||||
+#endif
|
||||
+
|
||||
if (nullptr != stream_info.upstreamHost()) {
|
||||
span.setTag(Tracing::Tags::get().UpstreamCluster, stream_info.upstreamHost()->cluster().name());
|
||||
span.setTag(Tracing::Tags::get().UpstreamClusterName,
|
||||
diff --git a/source/extensions/common/wasm/context.cc b/source/extensions/common/wasm/context.cc
|
||||
index 410baa856f..b11ecf1cd6 100644
|
||||
--- a/source/extensions/common/wasm/context.cc
|
||||
+++ b/source/extensions/common/wasm/context.cc
|
||||
@@ -60,6 +60,7 @@ namespace {
|
||||
constexpr absl::string_view CelStateKeyPrefix = "wasm.";
|
||||
|
||||
#if defined(ALIMESH)
|
||||
+constexpr absl::string_view CustomeTraceSpanTagPrefix = "trace_span_tag.";
|
||||
constexpr std::string_view ClearRouteCacheKey = "clear_route_cache";
|
||||
constexpr std::string_view DisableClearRouteCache = "off";
|
||||
constexpr std::string_view SetDecoderBufferLimit = "set_decoder_buffer_limit";
|
||||
@@ -1271,6 +1272,13 @@ WasmResult Context::setProperty(std::string_view path, std::string_view value) {
|
||||
if (!stream_info) {
|
||||
return WasmResult::NotFound;
|
||||
}
|
||||
+#ifdef ALIMESH
|
||||
+ if (absl::StartsWith(absl::string_view{path.data(), path.size()}, CustomeTraceSpanTagPrefix)) {
|
||||
+ stream_info->setCustomSpanTag(std::string(path.substr(CustomeTraceSpanTagPrefix.size())),
|
||||
+ std::string(value));
|
||||
+ return WasmResult::Ok;
|
||||
+ }
|
||||
+#endif
|
||||
std::string key;
|
||||
absl::StrAppend(&key, CelStateKeyPrefix, toAbslStringView(path));
|
||||
CelState* state;
|
||||
341
get_helm.sh
Executable file
341
get_helm.sh
Executable file
@@ -0,0 +1,341 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright The Helm Authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# The install script is based off of the MIT-licensed script from glide,
|
||||
# the package manager for Go: https://github.com/Masterminds/glide.sh/blob/master/get
|
||||
|
||||
: ${BINARY_NAME:="helm"}
|
||||
: ${USE_SUDO:="true"}
|
||||
: ${DEBUG:="false"}
|
||||
: ${VERIFY_CHECKSUM:="true"}
|
||||
: ${VERIFY_SIGNATURES:="false"}
|
||||
: ${HELM_INSTALL_DIR:="/usr/local/bin"}
|
||||
: ${GPG_PUBRING:="pubring.kbx"}
|
||||
|
||||
HAS_CURL="$(type "curl" &> /dev/null && echo true || echo false)"
|
||||
HAS_WGET="$(type "wget" &> /dev/null && echo true || echo false)"
|
||||
HAS_OPENSSL="$(type "openssl" &> /dev/null && echo true || echo false)"
|
||||
HAS_GPG="$(type "gpg" &> /dev/null && echo true || echo false)"
|
||||
HAS_GIT="$(type "git" &> /dev/null && echo true || echo false)"
|
||||
|
||||
# initArch discovers the architecture for this system.
|
||||
initArch() {
|
||||
ARCH=$(uname -m)
|
||||
case $ARCH in
|
||||
armv5*) ARCH="armv5";;
|
||||
armv6*) ARCH="armv6";;
|
||||
armv7*) ARCH="arm";;
|
||||
aarch64) ARCH="arm64";;
|
||||
x86) ARCH="386";;
|
||||
x86_64) ARCH="amd64";;
|
||||
i686) ARCH="386";;
|
||||
i386) ARCH="386";;
|
||||
esac
|
||||
}
|
||||
|
||||
# initOS discovers the operating system for this system.
|
||||
initOS() {
|
||||
OS=$(echo `uname`|tr '[:upper:]' '[:lower:]')
|
||||
|
||||
case "$OS" in
|
||||
# Minimalist GNU for Windows
|
||||
mingw*|cygwin*) OS='windows';;
|
||||
esac
|
||||
}
|
||||
|
||||
# runs the given command as root (detects if we are root already)
|
||||
runAsRoot() {
|
||||
if [ $EUID -ne 0 -a "$USE_SUDO" = "true" ]; then
|
||||
sudo "${@}"
|
||||
else
|
||||
"${@}"
|
||||
fi
|
||||
}
|
||||
|
||||
# verifySupported checks that the os/arch combination is supported for
|
||||
# binary builds, as well whether or not necessary tools are present.
|
||||
verifySupported() {
|
||||
local supported="darwin-amd64\ndarwin-arm64\nlinux-386\nlinux-amd64\nlinux-arm\nlinux-arm64\nlinux-ppc64le\nlinux-s390x\nlinux-riscv64\nwindows-amd64\nwindows-arm64"
|
||||
if ! echo "${supported}" | grep -q "${OS}-${ARCH}"; then
|
||||
echo "No prebuilt binary for ${OS}-${ARCH}."
|
||||
echo "To build from source, go to https://github.com/helm/helm"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "${HAS_CURL}" != "true" ] && [ "${HAS_WGET}" != "true" ]; then
|
||||
echo "Either curl or wget is required"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "${VERIFY_CHECKSUM}" == "true" ] && [ "${HAS_OPENSSL}" != "true" ]; then
|
||||
echo "In order to verify checksum, openssl must first be installed."
|
||||
echo "Please install openssl or set VERIFY_CHECKSUM=false in your environment."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "${VERIFY_SIGNATURES}" == "true" ]; then
|
||||
if [ "${HAS_GPG}" != "true" ]; then
|
||||
echo "In order to verify signatures, gpg must first be installed."
|
||||
echo "Please install gpg or set VERIFY_SIGNATURES=false in your environment."
|
||||
exit 1
|
||||
fi
|
||||
if [ "${OS}" != "linux" ]; then
|
||||
echo "Signature verification is currently only supported on Linux."
|
||||
echo "Please set VERIFY_SIGNATURES=false or verify the signatures manually."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "${HAS_GIT}" != "true" ]; then
|
||||
echo "[WARNING] Could not find git. It is required for plugin installation."
|
||||
fi
|
||||
}
|
||||
|
||||
# checkDesiredVersion checks if the desired version is available.
|
||||
checkDesiredVersion() {
|
||||
if [ "x$DESIRED_VERSION" == "x" ]; then
|
||||
# Get tag from release URL
|
||||
local latest_release_url="https://get.helm.sh/helm-latest-version"
|
||||
local latest_release_response=""
|
||||
if [ "${HAS_CURL}" == "true" ]; then
|
||||
latest_release_response=$( curl -L --silent --show-error --fail "$latest_release_url" 2>&1 || true )
|
||||
elif [ "${HAS_WGET}" == "true" ]; then
|
||||
latest_release_response=$( wget "$latest_release_url" -q -O - 2>&1 || true )
|
||||
fi
|
||||
TAG=$( echo "$latest_release_response" | grep '^v[0-9]' )
|
||||
if [ "x$TAG" == "x" ]; then
|
||||
printf "Could not retrieve the latest release tag information from %s: %s\n" "${latest_release_url}" "${latest_release_response}"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
TAG=$DESIRED_VERSION
|
||||
fi
|
||||
}
|
||||
|
||||
# checkHelmInstalledVersion checks which version of helm is installed and
|
||||
# if it needs to be changed.
|
||||
checkHelmInstalledVersion() {
|
||||
if [[ -f "${HELM_INSTALL_DIR}/${BINARY_NAME}" ]]; then
|
||||
local version=$("${HELM_INSTALL_DIR}/${BINARY_NAME}" version --template="{{ .Version }}")
|
||||
if [[ "$version" == "$TAG" ]]; then
|
||||
echo "Helm ${version} is already ${DESIRED_VERSION:-latest}"
|
||||
return 0
|
||||
else
|
||||
echo "Helm ${TAG} is available. Changing from version ${version}."
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# downloadFile downloads the latest binary package and also the checksum
|
||||
# for that binary.
|
||||
downloadFile() {
|
||||
HELM_DIST="helm-$TAG-$OS-$ARCH.tar.gz"
|
||||
DOWNLOAD_URL="https://get.helm.sh/$HELM_DIST"
|
||||
CHECKSUM_URL="$DOWNLOAD_URL.sha256"
|
||||
HELM_TMP_ROOT="$(mktemp -dt helm-installer-XXXXXX)"
|
||||
HELM_TMP_FILE="$HELM_TMP_ROOT/$HELM_DIST"
|
||||
HELM_SUM_FILE="$HELM_TMP_ROOT/$HELM_DIST.sha256"
|
||||
echo "Downloading $DOWNLOAD_URL"
|
||||
if [ "${HAS_CURL}" == "true" ]; then
|
||||
curl -SsL "$CHECKSUM_URL" -o "$HELM_SUM_FILE"
|
||||
curl -SsL "$DOWNLOAD_URL" -o "$HELM_TMP_FILE"
|
||||
elif [ "${HAS_WGET}" == "true" ]; then
|
||||
wget -q -O "$HELM_SUM_FILE" "$CHECKSUM_URL"
|
||||
wget -q -O "$HELM_TMP_FILE" "$DOWNLOAD_URL"
|
||||
fi
|
||||
}
|
||||
|
||||
# verifyFile verifies the SHA256 checksum of the binary package
|
||||
# and the GPG signatures for both the package and checksum file
|
||||
# (depending on settings in environment).
|
||||
verifyFile() {
|
||||
if [ "${VERIFY_CHECKSUM}" == "true" ]; then
|
||||
verifyChecksum
|
||||
fi
|
||||
if [ "${VERIFY_SIGNATURES}" == "true" ]; then
|
||||
verifySignatures
|
||||
fi
|
||||
}
|
||||
|
||||
# installFile installs the Helm binary.
|
||||
installFile() {
|
||||
HELM_TMP="$HELM_TMP_ROOT/$BINARY_NAME"
|
||||
mkdir -p "$HELM_TMP"
|
||||
tar xf "$HELM_TMP_FILE" -C "$HELM_TMP"
|
||||
HELM_TMP_BIN="$HELM_TMP/$OS-$ARCH/helm"
|
||||
echo "Preparing to install $BINARY_NAME into ${HELM_INSTALL_DIR}"
|
||||
runAsRoot cp "$HELM_TMP_BIN" "$HELM_INSTALL_DIR/$BINARY_NAME"
|
||||
echo "$BINARY_NAME installed into $HELM_INSTALL_DIR/$BINARY_NAME"
|
||||
}
|
||||
|
||||
# verifyChecksum verifies the SHA256 checksum of the binary package.
|
||||
verifyChecksum() {
|
||||
printf "Verifying checksum... "
|
||||
local sum=$(openssl sha1 -sha256 ${HELM_TMP_FILE} | awk '{print $2}')
|
||||
local expected_sum=$(cat ${HELM_SUM_FILE})
|
||||
if [ "$sum" != "$expected_sum" ]; then
|
||||
echo "SHA sum of ${HELM_TMP_FILE} does not match. Aborting."
|
||||
exit 1
|
||||
fi
|
||||
echo "Done."
|
||||
}
|
||||
|
||||
# verifySignatures obtains the latest KEYS file from GitHub main branch
|
||||
# as well as the signature .asc files from the specific GitHub release,
|
||||
# then verifies that the release artifacts were signed by a maintainer's key.
|
||||
verifySignatures() {
|
||||
printf "Verifying signatures... "
|
||||
local keys_filename="KEYS"
|
||||
local github_keys_url="https://raw.githubusercontent.com/helm/helm/main/${keys_filename}"
|
||||
if [ "${HAS_CURL}" == "true" ]; then
|
||||
curl -SsL "${github_keys_url}" -o "${HELM_TMP_ROOT}/${keys_filename}"
|
||||
elif [ "${HAS_WGET}" == "true" ]; then
|
||||
wget -q -O "${HELM_TMP_ROOT}/${keys_filename}" "${github_keys_url}"
|
||||
fi
|
||||
local gpg_keyring="${HELM_TMP_ROOT}/keyring.gpg"
|
||||
local gpg_homedir="${HELM_TMP_ROOT}/gnupg"
|
||||
mkdir -p -m 0700 "${gpg_homedir}"
|
||||
local gpg_stderr_device="/dev/null"
|
||||
if [ "${DEBUG}" == "true" ]; then
|
||||
gpg_stderr_device="/dev/stderr"
|
||||
fi
|
||||
gpg --batch --quiet --homedir="${gpg_homedir}" --import "${HELM_TMP_ROOT}/${keys_filename}" 2> "${gpg_stderr_device}"
|
||||
gpg --batch --no-default-keyring --keyring "${gpg_homedir}/${GPG_PUBRING}" --export > "${gpg_keyring}"
|
||||
local github_release_url="https://github.com/helm/helm/releases/download/${TAG}"
|
||||
if [ "${HAS_CURL}" == "true" ]; then
|
||||
curl -SsL "${github_release_url}/helm-${TAG}-${OS}-${ARCH}.tar.gz.sha256.asc" -o "${HELM_TMP_ROOT}/helm-${TAG}-${OS}-${ARCH}.tar.gz.sha256.asc"
|
||||
curl -SsL "${github_release_url}/helm-${TAG}-${OS}-${ARCH}.tar.gz.asc" -o "${HELM_TMP_ROOT}/helm-${TAG}-${OS}-${ARCH}.tar.gz.asc"
|
||||
elif [ "${HAS_WGET}" == "true" ]; then
|
||||
wget -q -O "${HELM_TMP_ROOT}/helm-${TAG}-${OS}-${ARCH}.tar.gz.sha256.asc" "${github_release_url}/helm-${TAG}-${OS}-${ARCH}.tar.gz.sha256.asc"
|
||||
wget -q -O "${HELM_TMP_ROOT}/helm-${TAG}-${OS}-${ARCH}.tar.gz.asc" "${github_release_url}/helm-${TAG}-${OS}-${ARCH}.tar.gz.asc"
|
||||
fi
|
||||
local error_text="If you think this might be a potential security issue,"
|
||||
error_text="${error_text}\nplease see here: https://github.com/helm/community/blob/master/SECURITY.md"
|
||||
local num_goodlines_sha=$(gpg --verify --keyring="${gpg_keyring}" --status-fd=1 "${HELM_TMP_ROOT}/helm-${TAG}-${OS}-${ARCH}.tar.gz.sha256.asc" 2> "${gpg_stderr_device}" | grep -c -E '^\[GNUPG:\] (GOODSIG|VALIDSIG)')
|
||||
if [[ ${num_goodlines_sha} -lt 2 ]]; then
|
||||
echo "Unable to verify the signature of helm-${TAG}-${OS}-${ARCH}.tar.gz.sha256!"
|
||||
echo -e "${error_text}"
|
||||
exit 1
|
||||
fi
|
||||
local num_goodlines_tar=$(gpg --verify --keyring="${gpg_keyring}" --status-fd=1 "${HELM_TMP_ROOT}/helm-${TAG}-${OS}-${ARCH}.tar.gz.asc" 2> "${gpg_stderr_device}" | grep -c -E '^\[GNUPG:\] (GOODSIG|VALIDSIG)')
|
||||
if [[ ${num_goodlines_tar} -lt 2 ]]; then
|
||||
echo "Unable to verify the signature of helm-${TAG}-${OS}-${ARCH}.tar.gz!"
|
||||
echo -e "${error_text}"
|
||||
exit 1
|
||||
fi
|
||||
echo "Done."
|
||||
}
|
||||
|
||||
# fail_trap is executed if an error occurs.
|
||||
fail_trap() {
|
||||
result=$?
|
||||
if [ "$result" != "0" ]; then
|
||||
if [[ -n "$INPUT_ARGUMENTS" ]]; then
|
||||
echo "Failed to install $BINARY_NAME with the arguments provided: $INPUT_ARGUMENTS"
|
||||
help
|
||||
else
|
||||
echo "Failed to install $BINARY_NAME"
|
||||
fi
|
||||
echo -e "\tFor support, go to https://github.com/helm/helm."
|
||||
fi
|
||||
cleanup
|
||||
exit $result
|
||||
}
|
||||
|
||||
# testVersion tests the installed client to make sure it is working.
|
||||
testVersion() {
|
||||
set +e
|
||||
HELM="$(command -v $BINARY_NAME)"
|
||||
if [ "$?" = "1" ]; then
|
||||
echo "$BINARY_NAME not found. Is $HELM_INSTALL_DIR on your "'$PATH?'
|
||||
exit 1
|
||||
fi
|
||||
set -e
|
||||
}
|
||||
|
||||
# help provides possible cli installation arguments
|
||||
help () {
|
||||
echo "Accepted cli arguments are:"
|
||||
echo -e "\t[--help|-h ] ->> prints this help"
|
||||
echo -e "\t[--version|-v <desired_version>] . When not defined it fetches the latest release from GitHub"
|
||||
echo -e "\te.g. --version v3.0.0 or -v canary"
|
||||
echo -e "\t[--no-sudo] ->> install without sudo"
|
||||
}
|
||||
|
||||
# cleanup temporary files to avoid https://github.com/helm/helm/issues/2977
|
||||
cleanup() {
|
||||
if [[ -d "${HELM_TMP_ROOT:-}" ]]; then
|
||||
rm -rf "$HELM_TMP_ROOT"
|
||||
fi
|
||||
}
|
||||
|
||||
# Execution
|
||||
|
||||
#Stop execution on any error
|
||||
trap "fail_trap" EXIT
|
||||
set -e
|
||||
|
||||
# Set debug if desired
|
||||
if [ "${DEBUG}" == "true" ]; then
|
||||
set -x
|
||||
fi
|
||||
|
||||
# Parsing input arguments (if any)
|
||||
export INPUT_ARGUMENTS="${@}"
|
||||
set -u
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
'--version'|-v)
|
||||
shift
|
||||
if [[ $# -ne 0 ]]; then
|
||||
export DESIRED_VERSION="${1}"
|
||||
if [[ "$1" != "v"* ]]; then
|
||||
echo "Expected version arg ('${DESIRED_VERSION}') to begin with 'v', fixing..."
|
||||
export DESIRED_VERSION="v${1}"
|
||||
fi
|
||||
else
|
||||
echo -e "Please provide the desired version. e.g. --version v3.0.0 or -v canary"
|
||||
exit 0
|
||||
fi
|
||||
;;
|
||||
'--no-sudo')
|
||||
USE_SUDO="false"
|
||||
;;
|
||||
'--help'|-h)
|
||||
help
|
||||
exit 0
|
||||
;;
|
||||
*) exit 1
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
set +u
|
||||
|
||||
initArch
|
||||
initOS
|
||||
verifySupported
|
||||
checkDesiredVersion
|
||||
if ! checkHelmInstalledVersion; then
|
||||
downloadFile
|
||||
verifyFile
|
||||
installFile
|
||||
fi
|
||||
testVersion
|
||||
cleanup
|
||||
4
go.mod
4
go.mod
@@ -255,7 +255,6 @@ require (
|
||||
go.opentelemetry.io/proto/otlp v0.12.0 // indirect
|
||||
go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.uber.org/zap v1.24.0 // indirect
|
||||
golang.org/x/crypto v0.17.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20230321023759-10a507213a29 // indirect
|
||||
golang.org/x/mod v0.11.0 // indirect
|
||||
@@ -304,7 +303,7 @@ replace istio.io/client-go => ./external/client-go
|
||||
|
||||
replace istio.io/istio => ./external/istio
|
||||
|
||||
replace github.com/caddyserver/certmagic => github.com/2456868764/certmagic v1.0.1
|
||||
replace github.com/caddyserver/certmagic => github.com/2456868764/certmagic v1.0.2
|
||||
|
||||
require (
|
||||
github.com/caddyserver/certmagic v0.20.0
|
||||
@@ -313,6 +312,7 @@ require (
|
||||
github.com/kylelemons/godebug v1.1.0
|
||||
github.com/mholt/acmez v1.2.0
|
||||
github.com/tidwall/gjson v1.17.0
|
||||
go.uber.org/zap v1.24.0
|
||||
golang.org/x/net v0.17.0
|
||||
helm.sh/helm/v3 v3.7.1
|
||||
k8s.io/apiextensions-apiserver v0.25.4
|
||||
|
||||
4
go.sum
4
go.sum
@@ -61,8 +61,8 @@ dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBr
|
||||
dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1:a1inKt/atXimZ4Mv927x+r7UpyzRUf4emIoiiSC2TN4=
|
||||
dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU=
|
||||
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||
github.com/2456868764/certmagic v1.0.1 h1:dRzow2Npe9llFTBhNVl0fVe8Yi/Q14ygNonlaZUyDZQ=
|
||||
github.com/2456868764/certmagic v1.0.1/go.mod h1:LOn81EQYMPajdew6Ln6SVdHPxPqPv6jwsUg92kiNlcQ=
|
||||
github.com/2456868764/certmagic v1.0.2 h1:xYoN4z6seONwT85llWXZcASvQME8TOSiSWQvLJsGGsE=
|
||||
github.com/2456868764/certmagic v1.0.2/go.mod h1:LOn81EQYMPajdew6Ln6SVdHPxPqPv6jwsUg92kiNlcQ=
|
||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20210929163055-e81b3f25be97/go.mod h1:WpB7kf89yJUETZxQnP1kgYPNwlT2jjdDYUCoxVggM3g=
|
||||
github.com/AlecAivazis/survey/v2 v2.3.6 h1:NvTuVHISgTHEHeBFqt6BHOe4Ny/NwGZr7w+F8S9ziyw=
|
||||
github.com/AlecAivazis/survey/v2 v2.3.6/go.mod h1:4AuI9b7RjAR+G7v9+C4YSlX/YL3K3cWNXgWXOhllqvI=
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
apiVersion: v2
|
||||
appVersion: 1.4.0
|
||||
appVersion: 1.4.2
|
||||
description: Helm chart for deploying higress gateways
|
||||
icon: https://higress.io/img/higress_logo_small.png
|
||||
home: http://higress.io/
|
||||
@@ -10,4 +10,4 @@ name: higress-core
|
||||
sources:
|
||||
- http://github.com/alibaba/higress
|
||||
type: application
|
||||
version: 1.4.0
|
||||
version: 1.4.2
|
||||
|
||||
@@ -97,7 +97,7 @@ higress: {{ include "controller.name" . }}
|
||||
{{- end }}
|
||||
|
||||
{{- define "skywalking.enabled" -}}
|
||||
{{- if and .Values.skywalking.enabled .Values.skywalking.service.address }}
|
||||
{{- if and (hasKey .Values "tracing") .Values.tracing.enable (hasKey .Values.tracing "skywalking") .Values.tracing.skywalking.service }}
|
||||
true
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
@@ -46,10 +46,6 @@
|
||||
address: {{ .Values.global.tracer.lightstep.address }}
|
||||
# Access Token used to communicate with the Satellite pool
|
||||
accessToken: {{ .Values.global.tracer.lightstep.accessToken }}
|
||||
{{- else if eq .Values.global.proxy.tracer "zipkin" }}
|
||||
zipkin:
|
||||
# Address of the Zipkin collector
|
||||
address: {{ .Values.global.tracer.zipkin.address | default (print "zipkin." .Release.Namespace ":9411") }}
|
||||
{{- else if eq .Values.global.proxy.tracer "datadog" }}
|
||||
datadog:
|
||||
# Address of the Datadog Agent
|
||||
@@ -88,7 +84,7 @@
|
||||
{{- if .Values.global.enableHigressIstio }}
|
||||
discoveryAddress: {{ printf "istiod.%s.svc" .Values.global.istioNamespace }}:15012
|
||||
{{- else }}
|
||||
discoveryAddress: higress-controller.{{.Release.Namespace}}.svc:15012
|
||||
discoveryAddress: {{ include "controller.name" . }}.{{.Release.Namespace}}.svc:15012
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
proxyStatsMatcher:
|
||||
@@ -109,7 +105,17 @@ metadata:
|
||||
labels:
|
||||
{{- include "gateway.labels" . | nindent 4 }}
|
||||
data:
|
||||
|
||||
higress: |-
|
||||
{{- $existingConfig := lookup "v1" "ConfigMap" .Release.Namespace "higress-config" }}
|
||||
{{- $existingData := dict }}
|
||||
{{- if $existingConfig }}
|
||||
{{- $existingData = index $existingConfig.data "higress" | default "{}" | fromYaml }}
|
||||
{{- end }}
|
||||
{{- $newData := dict }}
|
||||
{{- if and (hasKey .Values "tracing") .Values.tracing.enable }}
|
||||
{{- $_ := set $newData "tracing" .Values.tracing }}
|
||||
{{- end }}
|
||||
{{- toYaml (merge $existingData $newData) | nindent 4 }}
|
||||
# Configuration file for the mesh networks to be used by the Split Horizon EDS.
|
||||
meshNetworks: |-
|
||||
{{- if .Values.global.meshNetworks }}
|
||||
@@ -170,8 +176,8 @@ data:
|
||||
"endpoint": {
|
||||
"address": {
|
||||
"socket_address": {
|
||||
"address": "{{ .Values.skywalking.service.address }}",
|
||||
"port_value": "{{ .Values.skywalking.service.port }}"
|
||||
"address": "{{ .Values.tracing.skywalking.service }}",
|
||||
"port_value": "{{ .Values.tracing.skywalking.port }}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ rules:
|
||||
# ingress controller
|
||||
- apiGroups: ["extensions", "networking.k8s.io"]
|
||||
resources: ["ingresses"]
|
||||
verbs: ["get", "list", "watch"]
|
||||
verbs: ["create", "get", "list", "watch", "update", "delete", "patch"]
|
||||
- apiGroups: ["extensions", "networking.k8s.io"]
|
||||
resources: ["ingresses/status"]
|
||||
verbs: ["*"]
|
||||
@@ -36,7 +36,7 @@ rules:
|
||||
# Needed for multicluster secret reading, possibly ingress certs in the future
|
||||
- apiGroups: [""]
|
||||
resources: ["secrets"]
|
||||
verbs: ["get", "watch", "list"]
|
||||
verbs: ["get", "watch", "list", "create", "update", "delete", "patch"]
|
||||
|
||||
- apiGroups: ["networking.higress.io"]
|
||||
resources: ["mcpbridges"]
|
||||
@@ -61,12 +61,12 @@ rules:
|
||||
|
||||
# discovery and routing
|
||||
- apiGroups: [""]
|
||||
resources: ["pods", "nodes", "services", "namespaces", "endpoints"]
|
||||
resources: ["pods", "nodes", "services", "namespaces", "endpoints", "deployments"]
|
||||
verbs: ["get", "list", "watch"]
|
||||
- apiGroups: ["discovery.k8s.io"]
|
||||
resources: ["endpointslices"]
|
||||
verbs: ["get", "list", "watch"]
|
||||
|
||||
|
||||
# Istiod and bootstrap.
|
||||
- apiGroups: ["certificates.k8s.io"]
|
||||
resources:
|
||||
@@ -100,7 +100,7 @@ rules:
|
||||
- apiGroups: ["multicluster.x-k8s.io"]
|
||||
resources: ["serviceimports"]
|
||||
verbs: ["get", "watch", "list"]
|
||||
|
||||
|
||||
# sidecar injection controller
|
||||
- apiGroups: ["admissionregistration.k8s.io"]
|
||||
resources: ["mutatingwebhookconfigurations"]
|
||||
|
||||
@@ -26,9 +26,70 @@ spec:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
serviceAccountName: {{ include "controller.serviceAccountName" . }}
|
||||
{{- if .Values.global.priorityClassName }}
|
||||
priorityClassName: "{{ .Values.global.priorityClassName }}"
|
||||
{{- end }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.controller.podSecurityContext | nindent 8 }}
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.controller.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.controller.hub | default .Values.global.hub }}/{{ .Values.controller.image | default "higress" }}:{{ .Values.controller.tag | default .Chart.AppVersion }}"
|
||||
args:
|
||||
- "serve"
|
||||
- --gatewaySelectorKey=higress
|
||||
- --gatewaySelectorValue={{ .Release.Namespace }}-{{ include "gateway.name" . }}
|
||||
- --gatewayHttpPort={{ .Values.gateway.httpPort }}
|
||||
- --gatewayHttpsPort={{ .Values.gateway.httpsPort }}
|
||||
{{- if not .Values.global.enableStatus }}
|
||||
- --enableStatus={{ .Values.global.enableStatus }}
|
||||
{{- end }}
|
||||
- --ingressClass={{ .Values.global.ingressClass }}
|
||||
{{- if .Values.global.watchNamespace }}
|
||||
- --watchNamespace={{ .Values.global.watchNamespace }}
|
||||
{{- end }}
|
||||
- --enableAutomaticHttps={{ .Values.controller.automaticHttps.enabled }}
|
||||
- --automaticHttpsEmail={{ .Values.controller.automaticHttps.email }}
|
||||
env:
|
||||
- name: POD_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: metadata.name
|
||||
- name: POD_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: metadata.namespace
|
||||
- name: SERVICE_ACCOUNT
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: spec.serviceAccountName
|
||||
- name: DOMAIN_SUFFIX
|
||||
value: {{ .Values.global.proxy.clusterDomain }}
|
||||
{{- if .Values.controller.env }}
|
||||
{{- range $key, $val := .Values.controller.env }}
|
||||
- name: {{ $key }}
|
||||
value: "{{ $val }}"
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
ports:
|
||||
{{- range $idx, $port := .Values.controller.ports }}
|
||||
- name: {{ $port.name }}
|
||||
containerPort: {{ $port.port }}
|
||||
protocol: {{ $port.protocol }}
|
||||
{{- end }}
|
||||
readinessProbe:
|
||||
{{- toYaml .Values.controller.probe | nindent 12 }}
|
||||
{{- if not (or .Values.global.local .Values.global.kind) }}
|
||||
resources:
|
||||
{{- toYaml .Values.controller.resources | nindent 12 }}
|
||||
{{- end }}
|
||||
volumeMounts:
|
||||
- name: log
|
||||
mountPath: /var/log
|
||||
{{- if not .Values.global.enableHigressIstio }}
|
||||
- name: discovery
|
||||
image: "{{ .Values.pilot.hub | default .Values.global.hub }}/{{ .Values.pilot.image | default "pilot" }}:{{ .Values.pilot.tag | default .Chart.AppVersion }}"
|
||||
@@ -191,64 +252,6 @@ spec:
|
||||
mountPath: /cacerts
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
- name: {{ .Chart.Name }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.controller.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.controller.hub | default .Values.global.hub }}/{{ .Values.controller.image | default "higress" }}:{{ .Values.controller.tag | default .Chart.AppVersion }}"
|
||||
args:
|
||||
- "serve"
|
||||
- --gatewaySelectorKey=higress
|
||||
- --gatewaySelectorValue={{ .Release.Namespace }}-{{ include "gateway.name" . }}
|
||||
- --gatewayHttpPort={{ .Values.gateway.httpPort }}
|
||||
- --gatewayHttpsPort={{ .Values.gateway.httpsPort }}
|
||||
{{- if not .Values.global.enableStatus }}
|
||||
- --enableStatus={{ .Values.global.enableStatus }}
|
||||
{{- end }}
|
||||
- --ingressClass={{ .Values.global.ingressClass }}
|
||||
{{- if .Values.global.watchNamespace }}
|
||||
- --watchNamespace={{ .Values.global.watchNamespace }}
|
||||
{{- end }}
|
||||
- --enableAutomaticHttps={{ .Values.controller.automaticHttps.enabled }}
|
||||
- --automaticHttpsEmail={{ .Values.controller.automaticHttps.email }}
|
||||
env:
|
||||
- name: POD_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: metadata.name
|
||||
- name: POD_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: metadata.namespace
|
||||
- name: SERVICE_ACCOUNT
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: spec.serviceAccountName
|
||||
- name: DOMAIN_SUFFIX
|
||||
value: {{ .Values.global.proxy.clusterDomain }}
|
||||
{{- if .Values.controller.env }}
|
||||
{{- range $key, $val := .Values.controller.env }}
|
||||
- name: {{ $key }}
|
||||
value: "{{ $val }}"
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
ports:
|
||||
{{- range $idx, $port := .Values.controller.ports }}
|
||||
- name: {{ $port.name }}
|
||||
containerPort: {{ $port.port }}
|
||||
protocol: {{ $port.protocol }}
|
||||
{{- end }}
|
||||
readinessProbe:
|
||||
{{- toYaml .Values.controller.probe | nindent 12 }}
|
||||
{{- if not (or .Values.global.local .Values.global.kind) }}
|
||||
resources:
|
||||
{{- toYaml .Values.controller.resources | nindent 12 }}
|
||||
{{- end }}
|
||||
volumeMounts:
|
||||
- name: log
|
||||
mountPath: /var/log
|
||||
{{- with .Values.controller.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
|
||||
332
helm/core/templates/daemonset.yaml
Normal file
332
helm/core/templates/daemonset.yaml
Normal file
@@ -0,0 +1,332 @@
|
||||
{{- if eq .Values.gateway.kind "DaemonSet" -}}
|
||||
{{- $o11y := .Values.global.o11y }}
|
||||
{{- $unprivilegedPortSupported := true }}
|
||||
{{- range $index, $node := (lookup "v1" "Node" "default" "").items }}
|
||||
{{- $kernelVersion := $node.status.nodeInfo.kernelVersion }}
|
||||
{{- if $kernelVersion }}
|
||||
{{- $kernelVersion = regexFind "^(\\d+\\.\\d+\\.\\d+)" $kernelVersion }}
|
||||
{{- if and $kernelVersion (semverCompare "<4.11.0" $kernelVersion) }}
|
||||
{{- $unprivilegedPortSupported = false }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end -}}
|
||||
apiVersion: apps/v1
|
||||
kind: DaemonSet
|
||||
metadata:
|
||||
name: {{ include "gateway.name" . }}
|
||||
namespace: {{ .Release.Namespace }}
|
||||
labels:
|
||||
{{- include "gateway.labels" . | nindent 4}}
|
||||
annotations:
|
||||
{{- .Values.gateway.annotations | toYaml | nindent 4 }}
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "gateway.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
{{- if .Values.global.enableHigressIstio }}
|
||||
"enableHigressIstio": "true"
|
||||
{{- end }}
|
||||
{{- if .Values.gateway.podAnnotations }}
|
||||
{{- toYaml .Values.gateway.podAnnotations | nindent 8 }}
|
||||
{{- end }}
|
||||
labels:
|
||||
sidecar.istio.io/inject: "false"
|
||||
{{- with .Values.gateway.revision }}
|
||||
istio.io/rev: {{ . }}
|
||||
{{- end }}
|
||||
{{- include "gateway.selectorLabels" . | nindent 8 }}
|
||||
spec:
|
||||
{{- with .Values.gateway.imagePullSecrets }}
|
||||
imagePullSecrets:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
serviceAccountName: {{ include "gateway.serviceAccountName" . }}
|
||||
{{- if .Values.global.priorityClassName }}
|
||||
priorityClassName: "{{ .Values.global.priorityClassName }}"
|
||||
{{- end }}
|
||||
securityContext:
|
||||
{{- if .Values.gateway.securityContext }}
|
||||
{{- toYaml .Values.gateway.securityContext | nindent 8 }}
|
||||
{{- else if and $unprivilegedPortSupported (and (not .Values.gateway.hostNetwork) (semverCompare ">=1.22-0" .Capabilities.KubeVersion.GitVersion)) }}
|
||||
# Safe since 1.22: https://github.com/kubernetes/kubernetes/pull/103326
|
||||
sysctls:
|
||||
- name: net.ipv4.ip_unprivileged_port_start
|
||||
value: "0"
|
||||
{{- end }}
|
||||
containers:
|
||||
{{- if $o11y.enabled }}
|
||||
{{- $config := $o11y.promtail }}
|
||||
- name: promtail
|
||||
image: {{ $config.image.repository }}:{{ $config.image.tag }}
|
||||
imagePullPolicy: IfNotPresent
|
||||
args:
|
||||
- -config.file=/etc/promtail/promtail.yaml
|
||||
env:
|
||||
- name: 'HOSTNAME'
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: 'spec.nodeName'
|
||||
ports:
|
||||
- containerPort: {{ $config.port }}
|
||||
name: http-metrics
|
||||
protocol: TCP
|
||||
readinessProbe:
|
||||
failureThreshold: 3
|
||||
httpGet:
|
||||
path: /ready
|
||||
port: {{ $config.port }}
|
||||
scheme: HTTP
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 10
|
||||
successThreshold: 1
|
||||
timeoutSeconds: 1
|
||||
volumeMounts:
|
||||
- name: promtail-config
|
||||
mountPath: "/etc/promtail"
|
||||
- name: log
|
||||
mountPath: /var/log/proxy
|
||||
- name: tmp
|
||||
mountPath: /tmp
|
||||
{{- end }}
|
||||
- name: higress-gateway
|
||||
image: "{{ .Values.gateway.hub | default .Values.global.hub }}/{{ .Values.gateway.image | default "gateway" }}:{{ .Values.gateway.tag | default .Chart.AppVersion }}"
|
||||
args:
|
||||
- proxy
|
||||
- router
|
||||
- --domain
|
||||
- $(POD_NAMESPACE).svc.cluster.local
|
||||
- --proxyLogLevel=warning
|
||||
- --proxyComponentLogLevel=misc:error
|
||||
- --log_output_level=all:info
|
||||
- --serviceCluster=higress-gateway
|
||||
securityContext:
|
||||
{{- if .Values.gateway.containerSecurityContext }}
|
||||
{{- toYaml .Values.gateway.containerSecurityContext | nindent 12 }}
|
||||
{{- else if and $unprivilegedPortSupported (and (not .Values.gateway.hostNetwork) (semverCompare ">=1.22-0" .Capabilities.KubeVersion.GitVersion)) }}
|
||||
# Safe since 1.22: https://github.com/kubernetes/kubernetes/pull/103326
|
||||
capabilities:
|
||||
drop:
|
||||
- ALL
|
||||
allowPrivilegeEscalation: false
|
||||
privileged: false
|
||||
# When enabling lite metrics, the configuration template files need to be replaced.
|
||||
{{- if not .Values.global.liteMetrics }}
|
||||
readOnlyRootFilesystem: true
|
||||
{{- end }}
|
||||
runAsUser: 1337
|
||||
runAsGroup: 1337
|
||||
runAsNonRoot: true
|
||||
{{- else }}
|
||||
capabilities:
|
||||
drop:
|
||||
- ALL
|
||||
add:
|
||||
- NET_BIND_SERVICE
|
||||
runAsUser: 0
|
||||
runAsGroup: 1337
|
||||
runAsNonRoot: false
|
||||
allowPrivilegeEscalation: true
|
||||
{{- end }}
|
||||
env:
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: spec.nodeName
|
||||
- name: POD_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: metadata.name
|
||||
- name: POD_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: metadata.namespace
|
||||
- name: INSTANCE_IP
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: status.podIP
|
||||
- name: HOST_IP
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: status.hostIP
|
||||
- name: SERVICE_ACCOUNT
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.serviceAccountName
|
||||
- name: PILOT_XDS_SEND_TIMEOUT
|
||||
value: 60s
|
||||
- name: PROXY_XDS_VIA_AGENT
|
||||
value: "true"
|
||||
- name: ENABLE_INGRESS_GATEWAY_SDS
|
||||
value: "false"
|
||||
- name: JWT_POLICY
|
||||
value: {{ include "controller.jwtPolicy" . }}
|
||||
- name: ISTIO_META_HTTP10
|
||||
value: "1"
|
||||
- name: ISTIO_META_CLUSTER_ID
|
||||
value: "{{ $.Values.clusterName | default `Kubernetes` }}"
|
||||
- name: INSTANCE_NAME
|
||||
value: "higress-gateway"
|
||||
{{- if .Values.global.liteMetrics }}
|
||||
- name: LITE_METRICS
|
||||
value: "on"
|
||||
{{- end }}
|
||||
{{- if include "skywalking.enabled" . }}
|
||||
- name: ISTIO_BOOTSTRAP_OVERRIDE
|
||||
value: /etc/istio/custom-bootstrap/custom_bootstrap.json
|
||||
{{- end }}
|
||||
{{- with .Values.gateway.networkGateway }}
|
||||
- name: ISTIO_META_REQUESTED_NETWORK_VIEW
|
||||
value: "{{.}}"
|
||||
{{- end }}
|
||||
{{- range $key, $val := .Values.env }}
|
||||
- name: {{ $key }}
|
||||
value: {{ $val | quote }}
|
||||
{{- end }}
|
||||
ports:
|
||||
- containerPort: 15090
|
||||
protocol: TCP
|
||||
name: http-envoy-prom
|
||||
{{- if or .Values.global.local .Values.global.kind }}
|
||||
- containerPort: {{ .Values.gateway.httpPort }}
|
||||
hostPort: {{ .Values.gateway.httpPort }}
|
||||
name: http
|
||||
protocol: TCP
|
||||
- containerPort: {{ .Values.gateway.httpsPort }}
|
||||
hostPort: {{ .Values.gateway.httpsPort }}
|
||||
name: https
|
||||
protocol: TCP
|
||||
{{- end }}
|
||||
readinessProbe:
|
||||
failureThreshold: {{ .Values.gateway.readinessFailureThreshold }}
|
||||
httpGet:
|
||||
path: /healthz/ready
|
||||
port: 15021
|
||||
scheme: HTTP
|
||||
initialDelaySeconds: {{ .Values.gateway.readinessInitialDelaySeconds }}
|
||||
periodSeconds: {{ .Values.gateway.readinessPeriodSeconds }}
|
||||
successThreshold: {{ .Values.gateway.readinessSuccessThreshold }}
|
||||
timeoutSeconds: {{ .Values.gateway.readinessTimeoutSeconds }}
|
||||
{{- if not (or .Values.global.local .Values.global.kind) }}
|
||||
resources:
|
||||
{{- toYaml .Values.gateway.resources | nindent 12 }}
|
||||
{{- end }}
|
||||
volumeMounts:
|
||||
{{- if eq (include "controller.jwtPolicy" .) "third-party-jwt" }}
|
||||
- name: istio-token
|
||||
mountPath: /var/run/secrets/tokens
|
||||
readOnly: true
|
||||
{{- end }}
|
||||
- name: config
|
||||
mountPath: /etc/istio/config
|
||||
- name: istio-ca-root-cert
|
||||
mountPath: /var/run/secrets/istio
|
||||
- name: istio-data
|
||||
mountPath: /var/lib/istio/data
|
||||
- name: podinfo
|
||||
mountPath: /etc/istio/pod
|
||||
- name: proxy-socket
|
||||
mountPath: /etc/istio/proxy
|
||||
{{- if include "skywalking.enabled" . }}
|
||||
- mountPath: /etc/istio/custom-bootstrap
|
||||
name: custom-bootstrap-volume
|
||||
{{- end }}
|
||||
{{- if .Values.global.volumeWasmPlugins }}
|
||||
- mountPath: /opt/plugins
|
||||
name: local-wasmplugins-volume
|
||||
{{- end }}
|
||||
{{- if $o11y.enabled }}
|
||||
- mountPath: /var/log/proxy
|
||||
name: log
|
||||
{{- end }}
|
||||
{{- if .Values.gateway.hostNetwork }}
|
||||
hostNetwork: {{ .Values.gateway.hostNetwork }}
|
||||
dnsPolicy: ClusterFirstWithHostNet
|
||||
{{- end }}
|
||||
{{- with .Values.gateway.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.gateway.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.gateway.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
volumes:
|
||||
{{- if eq (include "controller.jwtPolicy" .) "third-party-jwt" }}
|
||||
- name: istio-token
|
||||
projected:
|
||||
sources:
|
||||
- serviceAccountToken:
|
||||
audience: istio-ca
|
||||
expirationSeconds: 43200
|
||||
path: istio-token
|
||||
{{- end }}
|
||||
- name: istio-ca-root-cert
|
||||
configMap:
|
||||
{{- if .Values.global.enableHigressIstio }}
|
||||
name: istio-ca-root-cert
|
||||
{{- else }}
|
||||
name: higress-ca-root-cert
|
||||
{{- end }}
|
||||
- name: config
|
||||
configMap:
|
||||
name: higress-config
|
||||
{{- if include "skywalking.enabled" . }}
|
||||
- configMap:
|
||||
defaultMode: 420
|
||||
name: higress-custom-bootstrap
|
||||
name: custom-bootstrap-volume
|
||||
{{- end }}
|
||||
- name: istio-data
|
||||
emptyDir: {}
|
||||
- name: proxy-socket
|
||||
emptyDir: {}
|
||||
{{- if $o11y.enabled }}
|
||||
- name: log
|
||||
emptyDir: {}
|
||||
- name: tmp
|
||||
emptyDir: {}
|
||||
- name: promtail-config
|
||||
configMap:
|
||||
name: higress-promtail
|
||||
{{- end }}
|
||||
- name: podinfo
|
||||
downwardAPI:
|
||||
defaultMode: 420
|
||||
items:
|
||||
- fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: metadata.labels
|
||||
path: labels
|
||||
- fieldRef:
|
||||
apiVersion: v1
|
||||
fieldPath: metadata.annotations
|
||||
path: annotations
|
||||
- path: cpu-request
|
||||
resourceFieldRef:
|
||||
containerName: higress-gateway
|
||||
divisor: 1m
|
||||
resource: requests.cpu
|
||||
- path: cpu-limit
|
||||
resourceFieldRef:
|
||||
containerName: higress-gateway
|
||||
divisor: 1m
|
||||
resource: limits.cpu
|
||||
{{- if .Values.global.volumeWasmPlugins }}
|
||||
- name: local-wasmplugins-volume
|
||||
hostPath:
|
||||
path: /opt/plugins
|
||||
type: Directory
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@@ -1,3 +1,4 @@
|
||||
{{- if eq .Values.gateway.kind "Deployment" -}}
|
||||
{{- $o11y := .Values.global.o11y }}
|
||||
{{- $unprivilegedPortSupported := true }}
|
||||
{{- range $index, $node := (lookup "v1" "Node" "default" "").items }}
|
||||
@@ -58,6 +59,9 @@ spec:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
serviceAccountName: {{ include "gateway.serviceAccountName" . }}
|
||||
{{- if .Values.global.priorityClassName }}
|
||||
priorityClassName: "{{ .Values.global.priorityClassName }}"
|
||||
{{- end }}
|
||||
securityContext:
|
||||
{{- if .Values.gateway.securityContext }}
|
||||
{{- toYaml .Values.gateway.securityContext | nindent 8 }}
|
||||
@@ -68,40 +72,6 @@ spec:
|
||||
value: "0"
|
||||
{{- end }}
|
||||
containers:
|
||||
{{- if $o11y.enabled }}
|
||||
{{- $config := $o11y.promtail }}
|
||||
- name: promtail
|
||||
image: {{ $config.image.repository }}:{{ $config.image.tag }}
|
||||
imagePullPolicy: IfNotPresent
|
||||
args:
|
||||
- -config.file=/etc/promtail/promtail.yaml
|
||||
env:
|
||||
- name: 'HOSTNAME'
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: 'spec.nodeName'
|
||||
ports:
|
||||
- containerPort: {{ $config.port }}
|
||||
name: http-metrics
|
||||
protocol: TCP
|
||||
readinessProbe:
|
||||
failureThreshold: 3
|
||||
httpGet:
|
||||
path: /ready
|
||||
port: {{ $config.port }}
|
||||
scheme: HTTP
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 10
|
||||
successThreshold: 1
|
||||
timeoutSeconds: 1
|
||||
volumeMounts:
|
||||
- name: promtail-config
|
||||
mountPath: "/etc/promtail"
|
||||
- name: log
|
||||
mountPath: /var/log/proxy
|
||||
- name: tmp
|
||||
mountPath: /tmp
|
||||
{{- end }}
|
||||
- name: higress-gateway
|
||||
image: "{{ .Values.gateway.hub | default .Values.global.hub }}/{{ .Values.gateway.image | default "gateway" }}:{{ .Values.gateway.tag | default .Chart.AppVersion }}"
|
||||
args:
|
||||
@@ -202,6 +172,9 @@ spec:
|
||||
value: {{ $val | quote }}
|
||||
{{- end }}
|
||||
ports:
|
||||
- containerPort: 15020
|
||||
protocol: TCP
|
||||
name: istio-prom
|
||||
- containerPort: 15090
|
||||
protocol: TCP
|
||||
name: http-envoy-prom
|
||||
@@ -241,7 +214,7 @@ spec:
|
||||
mountPath: /var/run/secrets/istio
|
||||
- name: istio-data
|
||||
mountPath: /var/lib/istio/data
|
||||
- name: podinfo
|
||||
- name: podinfo
|
||||
mountPath: /etc/istio/pod
|
||||
- name: proxy-socket
|
||||
mountPath: /etc/istio/proxy
|
||||
@@ -257,6 +230,40 @@ spec:
|
||||
- mountPath: /var/log/proxy
|
||||
name: log
|
||||
{{- end }}
|
||||
{{- if $o11y.enabled }}
|
||||
{{- $config := $o11y.promtail }}
|
||||
- name: promtail
|
||||
image: {{ $config.image.repository }}:{{ $config.image.tag }}
|
||||
imagePullPolicy: IfNotPresent
|
||||
args:
|
||||
- -config.file=/etc/promtail/promtail.yaml
|
||||
env:
|
||||
- name: 'HOSTNAME'
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: 'spec.nodeName'
|
||||
ports:
|
||||
- containerPort: {{ $config.port }}
|
||||
name: http-metrics
|
||||
protocol: TCP
|
||||
readinessProbe:
|
||||
failureThreshold: 3
|
||||
httpGet:
|
||||
path: /ready
|
||||
port: {{ $config.port }}
|
||||
scheme: HTTP
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 10
|
||||
successThreshold: 1
|
||||
timeoutSeconds: 1
|
||||
volumeMounts:
|
||||
- name: promtail-config
|
||||
mountPath: "/etc/promtail"
|
||||
- name: log
|
||||
mountPath: /var/log/proxy
|
||||
- name: tmp
|
||||
mountPath: /tmp
|
||||
{{- end }}
|
||||
{{- if .Values.gateway.hostNetwork }}
|
||||
hostNetwork: {{ .Values.gateway.hostNetwork }}
|
||||
dnsPolicy: ClusterFirstWithHostNet
|
||||
@@ -340,3 +347,4 @@ spec:
|
||||
path: /opt/plugins
|
||||
type: Directory
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
6
helm/core/templates/ingressclass.yaml
Normal file
6
helm/core/templates/ingressclass.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: IngressClass
|
||||
metadata:
|
||||
name: {{ .Values.global.ingressClass }}
|
||||
spec:
|
||||
controller: higress.io/higress-controller
|
||||
@@ -15,6 +15,9 @@ spec:
|
||||
{{- with .Values.gateway.service.loadBalancerIP }}
|
||||
loadBalancerIP: "{{ . }}"
|
||||
{{- end }}
|
||||
{{- with .Values.gateway.service.loadBalancerClass }}
|
||||
loadBalancerClass: "{{ . }}"
|
||||
{{- end }}
|
||||
{{- with .Values.gateway.service.loadBalancerSourceRanges }}
|
||||
loadBalancerSourceRanges:
|
||||
{{ toYaml . | indent 4 }}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
revision: ""
|
||||
global:
|
||||
liteMetrics: false
|
||||
liteMetrics: true
|
||||
xdsMaxRecvMsgSize: "104857600"
|
||||
defaultUpstreamConcurrencyThreshold: 10000
|
||||
enableSRDS: true
|
||||
@@ -178,9 +178,9 @@ global:
|
||||
# Default port for Pilot agent health checks. A value of 0 will disable health checking.
|
||||
statusPort: 15020
|
||||
|
||||
# Specify which tracer to use. One of: zipkin, lightstep, datadog, stackdriver.
|
||||
# Specify which tracer to use. One of: lightstep, datadog, stackdriver.
|
||||
# If using stackdriver tracer outside GCP, set env GOOGLE_APPLICATION_CREDENTIALS to the GCP credential file.
|
||||
tracer: "zipkin"
|
||||
tracer: ""
|
||||
|
||||
# Controls if sidecar is injected at the front of the container list and blocks the start of the other containers until the proxy is ready
|
||||
holdApplicationUntilProxyStarts: false
|
||||
@@ -330,12 +330,8 @@ global:
|
||||
maxNumberOfAnnotations: 200
|
||||
# The global default max number of attributes per span.
|
||||
maxNumberOfAttributes: 200
|
||||
zipkin:
|
||||
# Host:Port for reporting trace data in zipkin format. If not specified, will default to
|
||||
# zipkin service (port 9411) in the same namespace as the other istio components.
|
||||
address: ""
|
||||
|
||||
# Use the Mesh Control Protocol (MCP) for configuring Istiod. Requires an MCP source.
|
||||
|
||||
useMCP: false
|
||||
|
||||
# Observability (o11y) configurations
|
||||
@@ -343,7 +339,7 @@ global:
|
||||
enabled: false
|
||||
promtail:
|
||||
image:
|
||||
repository: grafana/promtail
|
||||
repository: higress-registry.cn-hangzhou.cr.aliyuncs.com/higress/promtail
|
||||
tag: 2.9.4
|
||||
port: 3101
|
||||
resources:
|
||||
@@ -396,6 +392,9 @@ gateway:
|
||||
replicas: 2
|
||||
image: gateway
|
||||
|
||||
# -- Use a `DaemonSet` or `Deployment`
|
||||
kind: Deployment
|
||||
|
||||
# The number of successive failed probes before indicating readiness failure.
|
||||
readinessFailureThreshold: 30
|
||||
|
||||
@@ -468,6 +467,7 @@ gateway:
|
||||
targetPort: 443
|
||||
annotations: {}
|
||||
loadBalancerIP: ""
|
||||
loadBalancerClass: ""
|
||||
loadBalancerSourceRanges: []
|
||||
externalTrafficPolicy: ""
|
||||
|
||||
@@ -664,9 +664,15 @@ pilot:
|
||||
podLabels: {}
|
||||
|
||||
|
||||
# Skywalking config settings
|
||||
skywalking:
|
||||
enabled: false
|
||||
service:
|
||||
address: ~
|
||||
port: 11800
|
||||
# Tracing config settings
|
||||
tracing:
|
||||
enable: false
|
||||
sampling: 100
|
||||
timeout: 500
|
||||
skywalking:
|
||||
# access_token: ""
|
||||
service: ""
|
||||
port: 11800
|
||||
# zipkin:
|
||||
# service: ""
|
||||
# port: 9411
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
dependencies:
|
||||
- name: higress-core
|
||||
repository: file://../core
|
||||
version: 1.4.0
|
||||
version: 1.4.2
|
||||
- name: higress-console
|
||||
repository: https://higress.io/helm-charts/
|
||||
version: 1.4.0
|
||||
digest: sha256:bf4c58ac28d4691907eab44a13eee398fc05ade95cdae07cb91d7e20ce4ba382
|
||||
generated: "2024-05-29T21:18:32.791995+08:00"
|
||||
version: 1.4.2
|
||||
digest: sha256:31b557e55584e589b140ae9b89cfc8b99df91771c7d28465c3a2b06a4f35a192
|
||||
generated: "2024-07-26T13:53:23.225023+08:00"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
apiVersion: v2
|
||||
appVersion: 1.4.0
|
||||
appVersion: 1.4.2
|
||||
description: Helm chart for deploying Higress gateways
|
||||
icon: https://higress.io/img/higress_logo_small.png
|
||||
home: http://higress.io/
|
||||
@@ -12,9 +12,9 @@ sources:
|
||||
dependencies:
|
||||
- name: higress-core
|
||||
repository: "file://../core"
|
||||
version: 1.4.0
|
||||
version: 1.4.2
|
||||
- name: higress-console
|
||||
repository: "https://higress.io/helm-charts/"
|
||||
version: 1.4.0
|
||||
version: 1.4.2
|
||||
type: application
|
||||
version: 1.4.0
|
||||
version: 1.4.2
|
||||
|
||||
21
istio/1.12/patches/istio/20240607-fix-stats.patch
Normal file
21
istio/1.12/patches/istio/20240607-fix-stats.patch
Normal file
@@ -0,0 +1,21 @@
|
||||
diff -Naur istio/tools/packaging/common/envoy_bootstrap.json istio-new/tools/packaging/common/envoy_bootstrap.json
|
||||
--- istio/tools/packaging/common/envoy_bootstrap.json 2024-06-07 16:50:21.000000000 +0800
|
||||
+++ istio-new/tools/packaging/common/envoy_bootstrap.json 2024-06-07 16:47:42.000000000 +0800
|
||||
@@ -38,7 +38,7 @@
|
||||
"stats_tags": [
|
||||
{
|
||||
"tag_name": "cluster_name",
|
||||
- "regex": "^cluster\\.((.+?(\\..+?\\.svc\\.cluster\\.local)?)\\.)"
|
||||
+ "regex": "^cluster\\.((.*?)\\.)(http1\\.|http2\\.|health_check\\.|zone\\.|external\\.|circuit_breakers\\.|[^\\.]+$)"
|
||||
},
|
||||
{
|
||||
"tag_name": "tcp_prefix",
|
||||
@@ -58,7 +58,7 @@
|
||||
},
|
||||
{
|
||||
"tag_name": "http_conn_manager_prefix",
|
||||
- "regex": "^http\\.(((?:[_.[:digit:]]*|[_\\[\\]aAbBcCdDeEfF[:digit:]]*))\\.)"
|
||||
+ "regex": "^http\\.(((outbound_([0-9]{1,3}\\.{0,1}){4}_\\d{0,5})|([^\\.]+))\\.)"
|
||||
},
|
||||
{
|
||||
"tag_name": "listener_address",
|
||||
53
istio/1.12/patches/istio/20240619-ai-stats.patch
Normal file
53
istio/1.12/patches/istio/20240619-ai-stats.patch
Normal file
@@ -0,0 +1,53 @@
|
||||
diff -Naur istio/tools/packaging/common/envoy_bootstrap.json istio-new/tools/packaging/common/envoy_bootstrap.json
|
||||
--- istio/tools/packaging/common/envoy_bootstrap.json 2024-06-19 13:39:49.179159469 +0800
|
||||
+++ istio-new/tools/packaging/common/envoy_bootstrap.json 2024-06-19 13:39:28.299159059 +0800
|
||||
@@ -37,6 +37,18 @@
|
||||
"use_all_default_tags": false,
|
||||
"stats_tags": [
|
||||
{
|
||||
+ "tag_name": "ai_route",
|
||||
+ "regex": "^wasmcustom\\.route\\.((.*?)\\.)upstream"
|
||||
+ },
|
||||
+ {
|
||||
+ "tag_name": "ai_cluster",
|
||||
+ "regex": "^wasmcustom\\..*?\\.upstream\\.((.*?)\\.)model"
|
||||
+ },
|
||||
+ {
|
||||
+ "tag_name": "ai_model",
|
||||
+ "regex": "^wasmcustom\\..*?\\.model\\.((.*?)\\.)(input_token|output_token)"
|
||||
+ },
|
||||
+ {
|
||||
"tag_name": "cluster_name",
|
||||
"regex": "^cluster\\.((.*?)\\.)(http1\\.|http2\\.|health_check\\.|zone\\.|external\\.|circuit_breakers\\.|[^\\.]+$)"
|
||||
},
|
||||
diff -Naur istio/tools/packaging/common/envoy_bootstrap_lite.json istio-new/tools/packaging/common/envoy_bootstrap_lite.json
|
||||
--- istio/tools/packaging/common/envoy_bootstrap_lite.json 2024-06-19 13:39:49.175159469 +0800
|
||||
+++ istio-new/tools/packaging/common/envoy_bootstrap_lite.json 2024-06-19 13:38:52.283158352 +0800
|
||||
@@ -37,6 +37,18 @@
|
||||
"use_all_default_tags": false,
|
||||
"stats_tags": [
|
||||
{
|
||||
+ "tag_name": "ai_route",
|
||||
+ "regex": "^wasmcustom\\.route\\.((.*?)\\.)upstream"
|
||||
+ },
|
||||
+ {
|
||||
+ "tag_name": "ai_cluster",
|
||||
+ "regex": "^wasmcustom\\..*?\\.upstream\\.((.*?)\\.)model"
|
||||
+ },
|
||||
+ {
|
||||
+ "tag_name": "ai_model",
|
||||
+ "regex": "^wasmcustom\\..*?\\.model\\.((.*?)\\.)(input_token|output_token)"
|
||||
+ },
|
||||
+ {
|
||||
"tag_name": "response_code_class",
|
||||
"regex": "_rq(_(\\dxx))$"
|
||||
},
|
||||
@@ -60,7 +72,7 @@
|
||||
"prefix": "vhost"
|
||||
},
|
||||
{
|
||||
- "safe_regex": {"regex": "^http.*rds.*", "google_re2":{}}
|
||||
+ "safe_regex": {"regex": "^http.*\\.rds\\..*", "google_re2":{}}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -391,7 +391,7 @@ func (s *Server) initAutomaticHttps() error {
|
||||
ServerAddress: s.CertHttpAddress,
|
||||
Email: s.AutomaticHttpsEmail,
|
||||
}
|
||||
certServer, err := cert.NewServer(s.kubeClient.Kube(), certOption)
|
||||
certServer, err := cert.NewServer(s.kubeClient.Kube(), s.xdsServer, certOption)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -17,10 +17,15 @@ package cert
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"reflect"
|
||||
"sync"
|
||||
|
||||
"github.com/caddyserver/certmagic"
|
||||
"github.com/mholt/acmez"
|
||||
"go.uber.org/zap"
|
||||
"go.uber.org/zap/zapcore"
|
||||
"istio.io/istio/pilot/pkg/model"
|
||||
"k8s.io/client-go/kubernetes"
|
||||
)
|
||||
|
||||
@@ -28,6 +33,10 @@ const (
|
||||
EventCertObtained = "cert_obtained"
|
||||
)
|
||||
|
||||
var (
|
||||
cfg *certmagic.Config
|
||||
)
|
||||
|
||||
type CertMgr struct {
|
||||
cfg *certmagic.Config
|
||||
client kubernetes.Interface
|
||||
@@ -39,9 +48,10 @@ type CertMgr struct {
|
||||
ingressSolver acmez.Solver
|
||||
configMgr *ConfigMgr
|
||||
secretMgr *SecretMgr
|
||||
XDSUpdater model.XDSUpdater
|
||||
}
|
||||
|
||||
func InitCertMgr(opts *Option, clientSet kubernetes.Interface, config *Config) (*CertMgr, error) {
|
||||
func InitCertMgr(opts *Option, clientSet kubernetes.Interface, config *Config, XDSUpdater model.XDSUpdater, configMgr *ConfigMgr) (*CertMgr, error) {
|
||||
CertLog.Infof("certmgr init config: %+v", config)
|
||||
// Init certmagic config
|
||||
// First make a pointer to a Cache as we need to reference the same Cache in
|
||||
@@ -49,21 +59,29 @@ func InitCertMgr(opts *Option, clientSet kubernetes.Interface, config *Config) (
|
||||
var cache *certmagic.Cache
|
||||
var storage certmagic.Storage
|
||||
storage, _ = NewConfigmapStorage(opts.Namespace, clientSet)
|
||||
renewalWindowRatio := float64(config.RenewBeforeDays / RenewMaxDays)
|
||||
renewalWindowRatio := float64(config.RenewBeforeDays) / float64(RenewMaxDays)
|
||||
logger := zap.New(zapcore.NewCore(
|
||||
zapcore.NewConsoleEncoder(zap.NewProductionEncoderConfig()),
|
||||
os.Stderr,
|
||||
zap.DebugLevel,
|
||||
))
|
||||
magicConfig := certmagic.Config{
|
||||
RenewalWindowRatio: renewalWindowRatio,
|
||||
Storage: storage,
|
||||
Logger: logger,
|
||||
}
|
||||
cache = certmagic.NewCache(certmagic.CacheOptions{
|
||||
GetConfigForCert: func(cert certmagic.Certificate) (*certmagic.Config, error) {
|
||||
// Here we use New to get a valid Config associated with the same cache.
|
||||
// The provided Config is used as a template and will be completed with
|
||||
// any defaults that are set in the Default config.
|
||||
return certmagic.New(cache, magicConfig), nil
|
||||
return cfg, nil
|
||||
},
|
||||
Logger: logger,
|
||||
})
|
||||
// init certmagic
|
||||
cfg := certmagic.New(cache, magicConfig)
|
||||
cfg = certmagic.New(cache, magicConfig)
|
||||
|
||||
// Init certmagic acme
|
||||
issuer := config.GetIssuer(IssuerTypeLetsencrypt)
|
||||
if issuer == nil {
|
||||
@@ -85,7 +103,6 @@ func InitCertMgr(opts *Option, clientSet kubernetes.Interface, config *Config) (
|
||||
// init issuers
|
||||
cfg.Issuers = []certmagic.Issuer{myACME}
|
||||
|
||||
configMgr, _ := NewConfigMgr(opts.Namespace, clientSet)
|
||||
secretMgr, _ := NewSecretMgr(opts.Namespace, clientSet)
|
||||
|
||||
certMgr := &CertMgr{
|
||||
@@ -97,6 +114,7 @@ func InitCertMgr(opts *Option, clientSet kubernetes.Interface, config *Config) (
|
||||
configMgr: configMgr,
|
||||
secretMgr: secretMgr,
|
||||
cache: cache,
|
||||
XDSUpdater: XDSUpdater,
|
||||
}
|
||||
certMgr.cfg.OnEvent = certMgr.OnEvent
|
||||
return certMgr, nil
|
||||
@@ -149,18 +167,31 @@ func (s *CertMgr) Reconcile(ctx context.Context, oldConfig *Config, newConfig *C
|
||||
// sync email
|
||||
s.myACME.Email = newIssuer.Email
|
||||
// sync RenewalWindowRatio
|
||||
s.cfg.RenewalWindowRatio = float64(newConfig.RenewBeforeDays / RenewMaxDays)
|
||||
renewalWindowRatio := float64(newConfig.RenewBeforeDays) / float64(RenewMaxDays)
|
||||
s.cfg.RenewalWindowRatio = renewalWindowRatio
|
||||
// start cache
|
||||
s.cache.Start()
|
||||
// sync domains
|
||||
s.manageSync(context.Background(), newDomains)
|
||||
s.configMgr.SetConfig(newConfig)
|
||||
CertLog.Infof("certMgr start to manageSync domains:+v%", newDomains)
|
||||
s.manageSync(context.Background(), newDomains)
|
||||
CertLog.Infof("certMgr manageSync domains done")
|
||||
} else {
|
||||
// stop cache maintainAssets
|
||||
s.cache.Stop()
|
||||
s.configMgr.SetConfig(newConfig)
|
||||
}
|
||||
|
||||
if oldConfig != nil && newConfig != nil {
|
||||
if oldConfig.FallbackForInvalidSecret != newConfig.FallbackForInvalidSecret || !reflect.DeepEqual(oldConfig.CredentialConfig, newConfig.CredentialConfig) {
|
||||
CertLog.Infof("ingress need to full push")
|
||||
s.XDSUpdater.ConfigUpdate(&model.PushRequest{
|
||||
Full: true,
|
||||
Reason: []model.TriggerReason{"higress-https-updated"},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -86,22 +86,35 @@ func (c *Config) GetSecretNameByDomain(issuerName IssuerName, domain string) str
|
||||
return ""
|
||||
}
|
||||
|
||||
func ParseTLSSecret(tlsSecret string) (string, string) {
|
||||
secrets := strings.Split(tlsSecret, "/")
|
||||
switch len(secrets) {
|
||||
case 1:
|
||||
return "", tlsSecret
|
||||
case 2:
|
||||
return secrets[0], secrets[1]
|
||||
}
|
||||
return "", ""
|
||||
}
|
||||
|
||||
func (c *Config) Validate() error {
|
||||
// check acmeIssuer
|
||||
if len(c.ACMEIssuer) == 0 {
|
||||
return fmt.Errorf("acmeIssuer is empty")
|
||||
}
|
||||
for _, issuer := range c.ACMEIssuer {
|
||||
switch issuer.Name {
|
||||
case IssuerTypeLetsencrypt:
|
||||
if issuer.Email == "" {
|
||||
return fmt.Errorf("acmeIssuer %s email is empty", issuer.Name)
|
||||
if c.AutomaticHttps {
|
||||
if len(c.ACMEIssuer) == 0 {
|
||||
return fmt.Errorf("no acmeIssuer configuration found when automaticHttps is enable")
|
||||
}
|
||||
for _, issuer := range c.ACMEIssuer {
|
||||
switch issuer.Name {
|
||||
case IssuerTypeLetsencrypt:
|
||||
if issuer.Email == "" {
|
||||
return fmt.Errorf("acmeIssuer %s email is empty", issuer.Name)
|
||||
}
|
||||
if !ValidateEmail(issuer.Email) {
|
||||
return fmt.Errorf("acmeIssuer %s email %s is invalid", issuer.Name, issuer.Email)
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("acmeIssuer name %s is not supported", issuer.Name)
|
||||
}
|
||||
if !ValidateEmail(issuer.Email) {
|
||||
return fmt.Errorf("acmeIssuer %s email %s is invalid", issuer.Name, issuer.Email)
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("acmeIssuer name %s is not supported", issuer.Name)
|
||||
}
|
||||
}
|
||||
// check credentialConfig
|
||||
@@ -111,14 +124,20 @@ func (c *Config) Validate() error {
|
||||
}
|
||||
if credential.TLSSecret == "" {
|
||||
return fmt.Errorf("credentialConfig tlsSecret is empty")
|
||||
} else {
|
||||
ns, secret := ParseTLSSecret(credential.TLSSecret)
|
||||
if ns == "" && secret == "" {
|
||||
return fmt.Errorf("credentialConfig tlsSecret %s is not supported", credential.TLSSecret)
|
||||
}
|
||||
}
|
||||
|
||||
if credential.TLSIssuer == IssuerTypeLetsencrypt {
|
||||
if len(credential.Domains) > 1 {
|
||||
return fmt.Errorf("credentialConfig tlsIssuer %s only support one domain", credential.TLSIssuer)
|
||||
}
|
||||
}
|
||||
if credential.TLSIssuer != IssuerTypeLetsencrypt && len(credential.TLSIssuer) > 0 {
|
||||
return fmt.Errorf("credential tls issuer %s is not support", credential.TLSIssuer)
|
||||
return fmt.Errorf("credential tls issuer %s is not supported", credential.TLSIssuer)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -120,3 +120,36 @@ func TestMatchSecretNameByDomain(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseTLSSecret(t *testing.T) {
|
||||
tests := []struct {
|
||||
tlsSecret string
|
||||
expectedNamespace string
|
||||
expectedSecretName string
|
||||
}{
|
||||
{
|
||||
tlsSecret: "example-com-tls",
|
||||
expectedNamespace: "",
|
||||
expectedSecretName: "example-com-tls",
|
||||
},
|
||||
|
||||
{
|
||||
tlsSecret: "kube-system/example-com-tls",
|
||||
expectedNamespace: "kube-system",
|
||||
expectedSecretName: "example-com-tls",
|
||||
},
|
||||
{
|
||||
tlsSecret: "kube-system/example-com/wildcard",
|
||||
expectedNamespace: "",
|
||||
expectedSecretName: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.tlsSecret, func(t *testing.T) {
|
||||
resultNamespace, resultSecretName := ParseTLSSecret(tt.tlsSecret)
|
||||
assert.Equal(t, tt.expectedNamespace, resultNamespace)
|
||||
assert.Equal(t, tt.expectedSecretName, resultSecretName)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,6 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
v1 "k8s.io/api/core/v1"
|
||||
@@ -27,10 +26,6 @@ import (
|
||||
"k8s.io/client-go/kubernetes"
|
||||
)
|
||||
|
||||
const (
|
||||
SecretNamePrefix = "higress-secret-"
|
||||
)
|
||||
|
||||
type SecretMgr struct {
|
||||
client kubernetes.Interface
|
||||
namespace string
|
||||
@@ -46,13 +41,21 @@ func NewSecretMgr(namespace string, client kubernetes.Interface) (*SecretMgr, er
|
||||
}
|
||||
|
||||
func (s *SecretMgr) Update(domain string, secretName string, privateKey []byte, certificate []byte, notBefore time.Time, notAfter time.Time, isRenew bool) error {
|
||||
//secretName := s.getSecretName(domain)
|
||||
secret := s.constructSecret(domain, privateKey, certificate, notBefore, notAfter, isRenew)
|
||||
_, err := s.client.CoreV1().Secrets(s.namespace).Get(context.Background(), secretName, metav1.GetOptions{})
|
||||
CertLog.Infof("update secret, domain:%s, secretName:%s, notBefore:%v, notAfter:%v, isRenew:%t", domain, secretName, notBefore, notAfter, isRenew)
|
||||
name := secretName
|
||||
namespace := s.namespace
|
||||
namespaceP, secretP := ParseTLSSecret(secretName)
|
||||
if namespaceP != "" {
|
||||
namespace = namespaceP
|
||||
name = secretP
|
||||
}
|
||||
|
||||
secret := s.constructSecret(domain, name, namespace, privateKey, certificate, notBefore, notAfter, isRenew)
|
||||
_, err := s.client.CoreV1().Secrets(namespace).Get(context.Background(), name, metav1.GetOptions{})
|
||||
if err != nil {
|
||||
if errors.IsNotFound(err) {
|
||||
// create secret
|
||||
_, err2 := s.client.CoreV1().Secrets(s.namespace).Create(context.Background(), secret, metav1.CreateOptions{})
|
||||
_, err2 := s.client.CoreV1().Secrets(namespace).Create(context.Background(), secret, metav1.CreateOptions{})
|
||||
return err2
|
||||
}
|
||||
return err
|
||||
@@ -61,7 +64,7 @@ func (s *SecretMgr) Update(domain string, secretName string, privateKey []byte,
|
||||
if _, ok := secret.Annotations["higress.io/cert-domain"]; !ok {
|
||||
return fmt.Errorf("the secret name %s is not automatic https secret name for the domain:%s, please rename it in config", secretName, domain)
|
||||
}
|
||||
_, err1 := s.client.CoreV1().Secrets(s.namespace).Update(context.Background(), secret, metav1.UpdateOptions{})
|
||||
_, err1 := s.client.CoreV1().Secrets(namespace).Update(context.Background(), secret, metav1.UpdateOptions{})
|
||||
if err1 != nil {
|
||||
return err1
|
||||
}
|
||||
@@ -69,23 +72,13 @@ func (s *SecretMgr) Update(domain string, secretName string, privateKey []byte,
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *SecretMgr) Delete(domain string) error {
|
||||
secretName := s.getSecretName(domain)
|
||||
err := s.client.CoreV1().Secrets(s.namespace).Delete(context.Background(), secretName, metav1.DeleteOptions{})
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *SecretMgr) getSecretName(domain string) string {
|
||||
return SecretNamePrefix + strings.ReplaceAll(strings.TrimSpace(domain), ".", "-")
|
||||
}
|
||||
|
||||
func (s *SecretMgr) constructSecret(domain string, privateKey []byte, certificate []byte, notBefore time.Time, notAfter time.Time, isRenew bool) *v1.Secret {
|
||||
secretName := s.getSecretName(domain)
|
||||
func (s *SecretMgr) constructSecret(domain string, name string, namespace string, privateKey []byte, certificate []byte, notBefore time.Time, notAfter time.Time, isRenew bool) *v1.Secret {
|
||||
annotationMap := make(map[string]string, 0)
|
||||
annotationMap["higress.io/cert-domain"] = domain
|
||||
annotationMap["higress.io/cert-notAfter"] = notAfter.Format("2006-01-02 15:04:05")
|
||||
annotationMap["higress.io/cert-notBefore"] = notBefore.Format("2006-01-02 15:04:05")
|
||||
annotationMap["higress.io/cert-renew"] = strconv.FormatBool(isRenew)
|
||||
annotationMap["higress.io/cert-source"] = string(IssuerTypeLetsencrypt)
|
||||
if isRenew {
|
||||
annotationMap["higress.io/cert-renew-time"] = time.Now().Format("2006-01-02 15:04:05")
|
||||
}
|
||||
@@ -97,8 +90,8 @@ func (s *SecretMgr) constructSecret(domain string, privateKey []byte, certificat
|
||||
dataMap["tls.crt"] = certificate
|
||||
secret := &v1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: secretName,
|
||||
Namespace: s.namespace,
|
||||
Name: name,
|
||||
Namespace: namespace,
|
||||
Annotations: annotationMap,
|
||||
},
|
||||
Type: v1.SecretTypeTLS,
|
||||
|
||||
@@ -22,6 +22,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/caddyserver/certmagic"
|
||||
"istio.io/istio/pilot/pkg/model"
|
||||
"k8s.io/client-go/kubernetes"
|
||||
)
|
||||
|
||||
@@ -37,12 +38,14 @@ type Server struct {
|
||||
clientSet kubernetes.Interface
|
||||
controller *Controller
|
||||
certMgr *CertMgr
|
||||
XDSUpdater model.XDSUpdater
|
||||
}
|
||||
|
||||
func NewServer(clientSet kubernetes.Interface, opts *Option) (*Server, error) {
|
||||
func NewServer(clientSet kubernetes.Interface, XDSUpdater model.XDSUpdater, opts *Option) (*Server, error) {
|
||||
server := &Server{
|
||||
clientSet: clientSet,
|
||||
opts: opts,
|
||||
clientSet: clientSet,
|
||||
opts: opts,
|
||||
XDSUpdater: XDSUpdater,
|
||||
}
|
||||
return server, nil
|
||||
}
|
||||
@@ -65,7 +68,7 @@ func (s *Server) InitServer() error {
|
||||
return err
|
||||
}
|
||||
// init certmgr
|
||||
certMgr, err := InitCertMgr(s.opts, s.clientSet, defaultConfig) // config and start
|
||||
certMgr, err := InitCertMgr(s.opts, s.clientSet, defaultConfig, s.XDSUpdater, configMgr) // config and start
|
||||
s.certMgr = certMgr
|
||||
// init controller
|
||||
controller, err := NewController(s.clientSet, s.opts.Namespace, certMgr, configMgr)
|
||||
|
||||
@@ -32,7 +32,7 @@ import (
|
||||
)
|
||||
|
||||
const (
|
||||
CertificatesPrefix = "/certificates"
|
||||
CertificatesPrefix = "certificates"
|
||||
ConfigmapStoreCertficatesPrefix = "higress-cert-store-certificates-"
|
||||
ConfigmapStoreDefaultName = "higress-cert-store-default"
|
||||
)
|
||||
@@ -155,7 +155,7 @@ func (s *ConfigmapStorage) List(ctx context.Context, prefix string, recursive bo
|
||||
// Check if the prefix corresponds to a specific key
|
||||
hashPrefix := fastHash([]byte(prefix))
|
||||
if strings.HasPrefix(prefix, CertificatesPrefix) {
|
||||
// If the prefix is "/certificates", get all ConfigMaps and traverse each one
|
||||
// If the prefix is "certificates/", get all ConfigMaps and traverse each one
|
||||
// List all ConfigMaps in the namespace with label higress.io/cert-https=true
|
||||
configmaps, err := s.client.CoreV1().ConfigMaps(s.namespace).List(ctx, metav1.ListOptions{FieldSelector: "metadata.annotations['higress.io/cert-https'] == 'true'"})
|
||||
if err != nil {
|
||||
@@ -289,14 +289,29 @@ func (s *ConfigmapStorage) String() string {
|
||||
return "ConfigmapStorage"
|
||||
}
|
||||
|
||||
// getConfigmapStoreNameByKey determines the storage name for a given key.
|
||||
// It checks if the key starts with 'certificates/' and if so, the key pattern should match one of the following:
|
||||
// 'certificates/<issuerKey>/<domain>/<domain>.json',
|
||||
// 'certificates/<issuerKey>/<domain>/<domain>.crt',
|
||||
// or 'certificates/<issuerKey>/<domain>/<domain>.key'.
|
||||
// It then returns the corresponding ConfigMap name.
|
||||
// If the key does not start with 'certificates/', it returns the default store name.
|
||||
//
|
||||
// Parameters:
|
||||
//
|
||||
// key - The configuration map key that needs to be mapped to a storage name.
|
||||
//
|
||||
// Returns:
|
||||
//
|
||||
// string - The calculated or default storage name based on the key.
|
||||
func (s *ConfigmapStorage) getConfigmapStoreNameByKey(key string) string {
|
||||
parts := strings.SplitN(key, "/", 10)
|
||||
if len(parts) >= 4 && parts[1] == "certificates" {
|
||||
domain := strings.TrimSuffix(parts[3], ".crt")
|
||||
domain = strings.TrimSuffix(domain, ".key")
|
||||
domain = strings.TrimSuffix(domain, ".json")
|
||||
issuerKey := parts[2]
|
||||
return ConfigmapStoreCertficatesPrefix + fastHash([]byte(issuerKey+domain))
|
||||
if strings.HasPrefix(key, "certificates/") {
|
||||
parts := strings.Split(key, "/")
|
||||
if len(parts) >= 4 && parts[0] == "certificates" {
|
||||
domain := parts[2]
|
||||
issuerKey := parts[1]
|
||||
return ConfigmapStoreCertficatesPrefix + fastHash([]byte(issuerKey+domain))
|
||||
}
|
||||
}
|
||||
return ConfigmapStoreDefaultName
|
||||
}
|
||||
|
||||
@@ -39,22 +39,29 @@ func TestGetConfigmapStoreNameByKey(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "certificate crt",
|
||||
key: "/certificates/issuerKey/domain.crt",
|
||||
key: "certificates/issuerKey/domain/domain.crt",
|
||||
expected: "higress-cert-store-certificates-" + fastHash([]byte("issuerKey"+"domain")),
|
||||
},
|
||||
|
||||
{
|
||||
name: "47.237.14.136.sslip.io crt",
|
||||
key: "certificates/acme-v02.api.letsencrypt.org-directory/47.237.14.136.sslip.io/47.237.14.136.sslip.io.crt",
|
||||
expected: "higress-cert-store-certificates-" + fastHash([]byte("acme-v02.api.letsencrypt.org-directory"+"47.237.14.136.sslip.io")),
|
||||
},
|
||||
|
||||
{
|
||||
name: "certificate meta",
|
||||
key: "/certificates/issuerKey/domain.json",
|
||||
key: "certificates/issuerKey/domain/domain.json",
|
||||
expected: "higress-cert-store-certificates-" + fastHash([]byte("issuerKey"+"domain")),
|
||||
},
|
||||
{
|
||||
name: "certificate key",
|
||||
key: "/certificates/issuerKey/domain.key",
|
||||
key: "certificates/issuerKey/domain/domain.key",
|
||||
expected: "higress-cert-store-certificates-" + fastHash([]byte("issuerKey"+"domain")),
|
||||
},
|
||||
{
|
||||
name: "user key",
|
||||
key: "/users/hello/2",
|
||||
key: "users/hello/2",
|
||||
expected: "higress-cert-store-default",
|
||||
},
|
||||
{
|
||||
@@ -82,7 +89,7 @@ func TestExists(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Store a test key
|
||||
testKey := "/certificates/issuer1/domain1.crt"
|
||||
testKey := "certificates/issuer1/domain1/domain1.crt"
|
||||
err = storage.Store(context.Background(), testKey, []byte("test-data"))
|
||||
assert.NoError(t, err)
|
||||
|
||||
@@ -94,17 +101,17 @@ func TestExists(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "Existing Key",
|
||||
key: "/certificates/issuer1/domain1.crt",
|
||||
key: "certificates/issuer1/domain1/domain1.crt",
|
||||
shouldExist: true,
|
||||
},
|
||||
{
|
||||
name: "Non-Existent Key1",
|
||||
key: "/certificates/issuer2/domain2.crt",
|
||||
key: "certificates/issuer2/domain2/domain2.crt",
|
||||
shouldExist: false,
|
||||
},
|
||||
{
|
||||
name: "Non-Existent Key2",
|
||||
key: "/users/hello/a",
|
||||
key: "users/hello/a",
|
||||
shouldExist: false,
|
||||
},
|
||||
// Add more test cases as needed
|
||||
@@ -129,7 +136,7 @@ func TestLoad(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Store a test key
|
||||
testKey := "/certificates/issuer1/domain1.crt"
|
||||
testKey := "certificates/issuer1/domain1/domain1.crt"
|
||||
testValue := []byte("test-data")
|
||||
err = storage.Store(context.Background(), testKey, testValue)
|
||||
assert.NoError(t, err)
|
||||
@@ -143,13 +150,13 @@ func TestLoad(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "Existing Key",
|
||||
key: "/certificates/issuer1/domain1.crt",
|
||||
key: "certificates/issuer1/domain1/domain1.crt",
|
||||
expected: testValue,
|
||||
shouldError: false,
|
||||
},
|
||||
{
|
||||
name: "Non-Existent Key",
|
||||
key: "/certificates/issuer2/domain2.crt",
|
||||
key: "certificates/issuer2/domain2/domain2.crt",
|
||||
expected: nil,
|
||||
shouldError: true,
|
||||
},
|
||||
@@ -192,28 +199,28 @@ func TestStore(t *testing.T) {
|
||||
shouldError bool
|
||||
}{
|
||||
{
|
||||
name: "Store Key with /certificates prefix",
|
||||
key: "/certificates/issuer1/domain1.crt",
|
||||
name: "Store Key with certificates prefix",
|
||||
key: "certificates/issuer1/domain1/domain1.crt",
|
||||
value: []byte("test-data1"),
|
||||
expected: map[string]string{fastHash([]byte("/certificates/issuer1/domain1.crt")): `{"k":"/certificates/issuer1/domain1.crt","v":"dGVzdC1kYXRhMQ=="}`},
|
||||
expected: map[string]string{fastHash([]byte("certificates/issuer1/domain1/domain1.crt")): `{"k":"certificates/issuer1/domain1/domain1.crt","v":"dGVzdC1kYXRhMQ=="}`},
|
||||
expectedConfigmapName: "higress-cert-store-certificates-" + fastHash([]byte("issuer1"+"domain1")),
|
||||
shouldError: false,
|
||||
},
|
||||
{
|
||||
name: "Store Key with /certificates prefix (additional data)",
|
||||
key: "/certificates/issuer2/domain2.crt",
|
||||
name: "Store Key with certificates prefix (additional data)",
|
||||
key: "certificates/issuer2/domain2/domain2.crt",
|
||||
value: []byte("test-data2"),
|
||||
expected: map[string]string{
|
||||
fastHash([]byte("/certificates/issuer2/domain2.crt")): `{"k":"/certificates/issuer2/domain2.crt","v":"dGVzdC1kYXRhMg=="}`,
|
||||
fastHash([]byte("certificates/issuer2/domain2/domain2.crt")): `{"k":"certificates/issuer2/domain2/domain2.crt","v":"dGVzdC1kYXRhMg=="}`,
|
||||
},
|
||||
expectedConfigmapName: "higress-cert-store-certificates-" + fastHash([]byte("issuer2"+"domain2")),
|
||||
shouldError: false,
|
||||
},
|
||||
{
|
||||
name: "Store Key without /certificates prefix",
|
||||
key: "/other/path/data.txt",
|
||||
name: "Store Key without certificates prefix",
|
||||
key: "other/path/data.txt",
|
||||
value: []byte("test-data3"),
|
||||
expected: map[string]string{fastHash([]byte("/other/path/data.txt")): `{"k":"/other/path/data.txt","v":"dGVzdC1kYXRhMw=="}`},
|
||||
expected: map[string]string{fastHash([]byte("other/path/data.txt")): `{"k":"other/path/data.txt","v":"dGVzdC1kYXRhMw=="}`},
|
||||
expectedConfigmapName: "higress-cert-store-default",
|
||||
shouldError: false,
|
||||
},
|
||||
@@ -256,17 +263,17 @@ func TestList(t *testing.T) {
|
||||
// Store some test data
|
||||
// Store some test data
|
||||
testKeys := []string{
|
||||
"/certificates/issuer1/domain1.crt",
|
||||
"/certificates/issuer1/domain2.crt",
|
||||
"/certificates/issuer1/domain3.crt", // Added another domain for issuer1
|
||||
"/certificates/issuer2/domain4.crt",
|
||||
"/certificates/issuer2/domain5.crt",
|
||||
"/certificates/issuer3/subdomain1/domain6.crt", // Two-level subdirectory under issuer3
|
||||
"/certificates/issuer3/subdomain1/subdomain2/domain7.crt", // Two more levels under issuer3
|
||||
"/other-prefix/key1/file1",
|
||||
"/other-prefix/key1/file2",
|
||||
"/other-prefix/key2/file3",
|
||||
"/other-prefix/key2/file4",
|
||||
"certificates/issuer1/domain1/domain1.crt",
|
||||
"certificates/issuer1/domain2/domain2.crt",
|
||||
"certificates/issuer1/domain3/domain3.crt", // Added another domain for issuer1
|
||||
"certificates/issuer2/domain4/domain4.crt",
|
||||
"certificates/issuer2/domain5/domain5.crt",
|
||||
"certificates/issuer3/domain6/domain6.crt", // Two-level subdirectory under issuer3
|
||||
"certificates/issuer3/subdomain1/subdomain2/domain7.crt", // Two more levels under issuer3
|
||||
"other-prefix/key1/file1",
|
||||
"other-prefix/key1/file2",
|
||||
"other-prefix/key2/file3",
|
||||
"other-prefix/key2/file4",
|
||||
}
|
||||
|
||||
for _, key := range testKeys {
|
||||
@@ -283,34 +290,34 @@ func TestList(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "List Certificates (Non-Recursive)",
|
||||
prefix: "/certificates",
|
||||
prefix: "certificates",
|
||||
recursive: false,
|
||||
expected: []string{"/certificates/issuer1", "/certificates/issuer2", "/certificates/issuer3"},
|
||||
expected: []string{"certificates/issuer1", "certificates/issuer2", "certificates/issuer3"},
|
||||
},
|
||||
{
|
||||
name: "List Certificates (Recursive)",
|
||||
prefix: "/certificates",
|
||||
prefix: "certificates",
|
||||
recursive: true,
|
||||
expected: []string{"/certificates/issuer1/domain1.crt", "/certificates/issuer1/domain2.crt", "/certificates/issuer1/domain3.crt", "/certificates/issuer2/domain4.crt", "/certificates/issuer2/domain5.crt", "/certificates/issuer3/subdomain1/domain6.crt", "/certificates/issuer3/subdomain1/subdomain2/domain7.crt"},
|
||||
expected: []string{"certificates/issuer1/domain1/domain1.crt", "certificates/issuer1/domain2/domain2.crt", "certificates/issuer1/domain3/domain3.crt", "certificates/issuer2/domain4/domain4.crt", "certificates/issuer2/domain5/domain5.crt", "certificates/issuer3/domain6/domain6.crt", "certificates/issuer3/subdomain1/subdomain2/domain7.crt"},
|
||||
},
|
||||
{
|
||||
name: "List Other Prefix (Non-Recursive)",
|
||||
prefix: "/other-prefix",
|
||||
prefix: "other-prefix",
|
||||
recursive: false,
|
||||
expected: []string{"/other-prefix/key1", "/other-prefix/key2"},
|
||||
expected: []string{"other-prefix/key1", "other-prefix/key2"},
|
||||
},
|
||||
|
||||
{
|
||||
name: "List Other Prefix (Non-Recursive)",
|
||||
prefix: "/other-prefix/key1",
|
||||
prefix: "other-prefix/key1",
|
||||
recursive: false,
|
||||
expected: []string{"/other-prefix/key1/file1", "/other-prefix/key1/file2"},
|
||||
expected: []string{"other-prefix/key1/file1", "other-prefix/key1/file2"},
|
||||
},
|
||||
{
|
||||
name: "List Other Prefix (Recursive)",
|
||||
prefix: "/other-prefix",
|
||||
prefix: "other-prefix",
|
||||
recursive: true,
|
||||
expected: []string{"/other-prefix/key1/file1", "/other-prefix/key1/file2", "/other-prefix/key2/file3", "/other-prefix/key2/file4"},
|
||||
expected: []string{"other-prefix/key1/file1", "other-prefix/key1/file2", "other-prefix/key2/file3", "other-prefix/key2/file4"},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -15,7 +15,8 @@
|
||||
package hgctl
|
||||
|
||||
const (
|
||||
yamlOutput = "yaml"
|
||||
jsonOutput = "json"
|
||||
flagsOutput = "flags"
|
||||
summaryOutput = "short"
|
||||
yamlOutput = "yaml"
|
||||
jsonOutput = "json"
|
||||
flagsOutput = "flags"
|
||||
)
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
|
||||
"github.com/alibaba/higress/cmd/hgctl/config"
|
||||
"github.com/spf13/cobra"
|
||||
"istio.io/istio/istioctl/pkg/writer/envoy/configdump"
|
||||
cmdutil "k8s.io/kubectl/pkg/cmd/util"
|
||||
)
|
||||
|
||||
@@ -49,17 +50,23 @@ func runClusterConfig(c *cobra.Command, args []string) error {
|
||||
if len(args) != 0 {
|
||||
podName = args[0]
|
||||
}
|
||||
envoyConfig, err := config.GetEnvoyConfig(&config.GetEnvoyConfigOptions{
|
||||
configWriter, err := config.GetEnvoyConfigWriter(&config.GetEnvoyConfigOptions{
|
||||
PodName: podName,
|
||||
PodNamespace: podNamespace,
|
||||
BindAddress: bindAddress,
|
||||
Output: output,
|
||||
EnvoyConfigType: config.ClusterEnvoyConfigType,
|
||||
IncludeEds: true,
|
||||
})
|
||||
}, c.OutOrStdout())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = fmt.Fprintln(c.OutOrStdout(), string(envoyConfig))
|
||||
return err
|
||||
switch output {
|
||||
case summaryOutput:
|
||||
return configWriter.PrintClusterSummary(configdump.ClusterFilter{})
|
||||
case jsonOutput, yamlOutput:
|
||||
return configWriter.PrintClusterDump(configdump.ClusterFilter{}, output)
|
||||
default:
|
||||
return fmt.Errorf("output format %q not supported", output)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,7 +52,7 @@ func newConfigCommand() *cobra.Command {
|
||||
flags := cfgCommand.Flags()
|
||||
options.AddKubeConfigFlags(flags)
|
||||
|
||||
cfgCommand.PersistentFlags().StringVarP(&output, "output", "o", "json", "One of 'yaml' or 'json'")
|
||||
cfgCommand.PersistentFlags().StringVarP(&output, "output", "o", "json", "Output format: one of json|yaml|short")
|
||||
cfgCommand.PersistentFlags().StringVarP(&podNamespace, "namespace", "n", "higress-system", "Namespace where envoy proxy pod are installed.")
|
||||
|
||||
return cfgCommand
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
|
||||
"github.com/alibaba/higress/cmd/hgctl/config"
|
||||
"github.com/spf13/cobra"
|
||||
"istio.io/istio/istioctl/pkg/writer/envoy/configdump"
|
||||
cmdutil "k8s.io/kubectl/pkg/cmd/util"
|
||||
)
|
||||
|
||||
@@ -49,17 +50,23 @@ func runListenerConfig(c *cobra.Command, args []string) error {
|
||||
if len(args) != 0 {
|
||||
podName = args[0]
|
||||
}
|
||||
envoyConfig, err := config.GetEnvoyConfig(&config.GetEnvoyConfigOptions{
|
||||
configWriter, err := config.GetEnvoyConfigWriter(&config.GetEnvoyConfigOptions{
|
||||
PodName: podName,
|
||||
PodNamespace: podNamespace,
|
||||
BindAddress: bindAddress,
|
||||
Output: output,
|
||||
EnvoyConfigType: config.ListenerEnvoyConfigType,
|
||||
IncludeEds: true,
|
||||
})
|
||||
}, c.OutOrStdout())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = fmt.Fprintln(c.OutOrStdout(), string(envoyConfig))
|
||||
return err
|
||||
switch output {
|
||||
case summaryOutput:
|
||||
return configWriter.PrintListenerSummary(configdump.ListenerFilter{Verbose: true})
|
||||
case jsonOutput, yamlOutput:
|
||||
return configWriter.PrintListenerDump(configdump.ListenerFilter{Verbose: true}, output)
|
||||
default:
|
||||
return fmt.Errorf("output format %q not supported", output)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
|
||||
"github.com/alibaba/higress/cmd/hgctl/config"
|
||||
"github.com/spf13/cobra"
|
||||
"istio.io/istio/istioctl/pkg/writer/envoy/configdump"
|
||||
cmdutil "k8s.io/kubectl/pkg/cmd/util"
|
||||
)
|
||||
|
||||
@@ -49,17 +50,23 @@ func runRouteConfig(c *cobra.Command, args []string) error {
|
||||
if len(args) != 0 {
|
||||
podName = args[0]
|
||||
}
|
||||
envoyConfig, err := config.GetEnvoyConfig(&config.GetEnvoyConfigOptions{
|
||||
configWriter, err := config.GetEnvoyConfigWriter(&config.GetEnvoyConfigOptions{
|
||||
PodName: podName,
|
||||
PodNamespace: podNamespace,
|
||||
BindAddress: bindAddress,
|
||||
Output: output,
|
||||
EnvoyConfigType: config.RouteEnvoyConfigType,
|
||||
IncludeEds: true,
|
||||
})
|
||||
}, c.OutOrStdout())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = fmt.Fprintln(c.OutOrStdout(), string(envoyConfig))
|
||||
return err
|
||||
switch output {
|
||||
case summaryOutput:
|
||||
return configWriter.PrintRouteSummary(configdump.RouteFilter{Verbose: true})
|
||||
case jsonOutput, yamlOutput:
|
||||
return configWriter.PrintRouteDump(configdump.RouteFilter{Verbose: true}, output)
|
||||
default:
|
||||
return fmt.Errorf("output format %q not supported", output)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -425,7 +425,7 @@ func openCommand(writer io.Writer, command string, args ...string) {
|
||||
_, err := exec.LookPath(command)
|
||||
if err != nil {
|
||||
if errors.Is(err, exec.ErrNotFound) {
|
||||
fmt.Fprintf(writer, "Could not open your browser. Please open it maually.\n")
|
||||
fmt.Fprintf(writer, "Could not open your browser. Please open it manually.\n")
|
||||
return
|
||||
}
|
||||
fmt.Fprintf(writer, "Failed to open browser; open %s in your browser.\nError: %s\n", args[0], err.Error())
|
||||
|
||||
@@ -28,7 +28,7 @@ import (
|
||||
|
||||
const (
|
||||
setFlagHelpStr = `Override an higress profile value, e.g. to choose a profile
|
||||
(--set profile=local-k8s), or override profile values (--set gateway.replicas=2), or override helm values (--set values.global.proxy.resources.requsts.cpu=500m).`
|
||||
(--set profile=local-k8s), or override profile values (--set gateway.replicas=2), or override helm values (--set values.global.proxy.resources.requests.cpu=500m).`
|
||||
// manifestsFlagHelpStr is the command line description for --manifests
|
||||
manifestsFlagHelpStr = `Specify a path to a directory of profiles
|
||||
(e.g. ~/Downloads/higress/manifests).`
|
||||
@@ -101,7 +101,7 @@ func newInstallCmd() *cobra.Command {
|
||||
hgctl install --set profile=local-k8s --set global.enableIstioAPI=true --set gateway.replicas=2"
|
||||
|
||||
# To override helm setting
|
||||
hgctl install --set profile=local-k8s --set values.global.proxy.resources.requsts.cpu=500m"
|
||||
hgctl install --set profile=local-k8s --set values.global.proxy.resources.requests.cpu=500m"
|
||||
|
||||
|
||||
`,
|
||||
@@ -175,7 +175,7 @@ func promptInstall(writer io.Writer, profileName string) bool {
|
||||
|
||||
func promptProfileName(writer io.Writer) string {
|
||||
answer := ""
|
||||
fmt.Fprintf(writer, "\nPlease select higress install configration profile:\n")
|
||||
fmt.Fprintf(writer, "\nPlease select higress install configuration profile:\n")
|
||||
fmt.Fprintf(writer, "\n1.Install higress to local kubernetes cluster like kind etc.\n")
|
||||
fmt.Fprintf(writer, "\n2.Install higress to kubernetes cluster\n")
|
||||
fmt.Fprintf(writer, "\n3.Install higress to local docker environment\n")
|
||||
|
||||
@@ -176,7 +176,7 @@ func (a *Agent) checkSudoPermission() error {
|
||||
case <-time.After(5 * time.Second):
|
||||
cmd2.Process.Signal(os.Interrupt)
|
||||
if !a.quiet {
|
||||
fmt.Fprintf(a.writer, "checked result: timeout execeed and need sudo with password\n")
|
||||
fmt.Fprintf(a.writer, "checked result: timeout exceed and need sudo with password\n")
|
||||
}
|
||||
a.runSudoState = SudoWithPassword
|
||||
|
||||
|
||||
@@ -108,7 +108,7 @@ func upgrade(writer io.Writer, iArgs *InstallArgs) error {
|
||||
func promptUpgrade(writer io.Writer) bool {
|
||||
answer := ""
|
||||
for {
|
||||
fmt.Fprintf(writer, "All Higress resources will be upgraed from the cluster. \nProceed? (y/N)")
|
||||
fmt.Fprintf(writer, "All Higress resources will be upgrade from the cluster. \nProceed? (y/N)")
|
||||
fmt.Scanln(&answer)
|
||||
if strings.TrimSpace(answer) == "y" {
|
||||
fmt.Fprintf(writer, "\n")
|
||||
@@ -170,7 +170,7 @@ func promptProfileContexts(writer io.Writer, profileContexts []*installer.Profil
|
||||
if len(profileContexts) == 1 {
|
||||
fmt.Fprintf(writer, "\nFound a profile:: ")
|
||||
} else {
|
||||
fmt.Fprintf(writer, "\nPlease select higress installed configration profiles:\n")
|
||||
fmt.Fprintf(writer, "\nPlease select higress installed configuration profiles:\n")
|
||||
}
|
||||
index := 1
|
||||
for _, profileContext := range profileContexts {
|
||||
|
||||
@@ -32,7 +32,7 @@ func ParseProtocol(s string) Protocol {
|
||||
return TCP
|
||||
case "http":
|
||||
return HTTP
|
||||
case "grpc":
|
||||
case "grpc", "triple", "tri":
|
||||
return GRPC
|
||||
case "dubbo":
|
||||
return Dubbo
|
||||
|
||||
@@ -841,6 +841,7 @@ func (m *IngressConfig) convertIstioWasmPlugin(obj *higressext.WasmPlugin) (*ext
|
||||
StructValue: rule.Config,
|
||||
}
|
||||
var matchItems []*types.Value
|
||||
// match ingress
|
||||
for _, ing := range rule.Ingress {
|
||||
matchItems = append(matchItems, &types.Value{
|
||||
Kind: &types.Value_StringValue{
|
||||
@@ -861,6 +862,7 @@ func (m *IngressConfig) convertIstioWasmPlugin(obj *higressext.WasmPlugin) (*ext
|
||||
})
|
||||
continue
|
||||
}
|
||||
// match domain
|
||||
for _, domain := range rule.Domain {
|
||||
matchItems = append(matchItems, &types.Value{
|
||||
Kind: &types.Value_StringValue{
|
||||
@@ -868,10 +870,31 @@ func (m *IngressConfig) convertIstioWasmPlugin(obj *higressext.WasmPlugin) (*ext
|
||||
},
|
||||
})
|
||||
}
|
||||
if len(matchItems) > 0 {
|
||||
v.StructValue.Fields["_match_domain_"] = &types.Value{
|
||||
Kind: &types.Value_ListValue{
|
||||
ListValue: &types.ListValue{
|
||||
Values: matchItems,
|
||||
},
|
||||
},
|
||||
}
|
||||
ruleValues = append(ruleValues, &types.Value{
|
||||
Kind: v,
|
||||
})
|
||||
continue
|
||||
}
|
||||
// match service
|
||||
for _, service := range rule.Service {
|
||||
matchItems = append(matchItems, &types.Value{
|
||||
Kind: &types.Value_StringValue{
|
||||
StringValue: service,
|
||||
},
|
||||
})
|
||||
}
|
||||
if len(matchItems) == 0 {
|
||||
return nil, fmt.Errorf("invalid match rule has no match condition, rule:%v", rule)
|
||||
}
|
||||
v.StructValue.Fields["_match_domain_"] = &types.Value{
|
||||
v.StructValue.Fields["_match_service_"] = &types.Value{
|
||||
Kind: &types.Value_ListValue{
|
||||
ListValue: &types.ListValue{
|
||||
Values: matchItems,
|
||||
@@ -918,7 +941,7 @@ func (m *IngressConfig) AddOrUpdateWasmPlugin(clusterNamespacedName util.Cluster
|
||||
Labels: map[string]string{constants.AlwaysPushLabel: "true"},
|
||||
}
|
||||
for _, f := range m.wasmPluginHandlers {
|
||||
IngressLog.Debug("WasmPlugin triggerd update")
|
||||
IngressLog.Debug("WasmPlugin triggered update")
|
||||
f(config.Config{Meta: metadata}, config.Config{Meta: metadata}, model.EventUpdate)
|
||||
}
|
||||
istioWasmPlugin, err := m.convertIstioWasmPlugin(&wasmPlugin.Spec)
|
||||
@@ -960,7 +983,7 @@ func (m *IngressConfig) DeleteWasmPlugin(clusterNamespacedName util.ClusterNames
|
||||
Labels: map[string]string{constants.AlwaysPushLabel: "true"},
|
||||
}
|
||||
for _, f := range m.wasmPluginHandlers {
|
||||
IngressLog.Debug("WasmPlugin triggerd update")
|
||||
IngressLog.Debug("WasmPlugin triggered update")
|
||||
f(config.Config{Meta: metadata}, config.Config{Meta: metadata}, model.EventDelete)
|
||||
}
|
||||
}
|
||||
@@ -987,7 +1010,7 @@ func (m *IngressConfig) AddOrUpdateMcpBridge(clusterNamespacedName util.ClusterN
|
||||
Labels: map[string]string{constants.AlwaysPushLabel: "true"},
|
||||
}
|
||||
for _, f := range m.serviceEntryHandlers {
|
||||
IngressLog.Debug("McpBridge triggerd serviceEntry update")
|
||||
IngressLog.Debug("McpBridge triggered serviceEntry update")
|
||||
f(config.Config{Meta: metadata}, config.Config{Meta: metadata}, model.EventUpdate)
|
||||
}
|
||||
}, m.localKubeClient, m.namespace)
|
||||
@@ -1042,7 +1065,7 @@ func (m *IngressConfig) AddOrUpdateHttp2Rpc(clusterNamespacedName util.ClusterNa
|
||||
}
|
||||
|
||||
func (m *IngressConfig) DeleteHttp2Rpc(clusterNamespacedName util.ClusterNamespacedName) {
|
||||
IngressLog.Infof("Http2Rpc triggerd deleted event %s", clusterNamespacedName.Name)
|
||||
IngressLog.Infof("Http2Rpc triggered deleted event %s", clusterNamespacedName.Name)
|
||||
if clusterNamespacedName.Namespace != m.namespace {
|
||||
return
|
||||
}
|
||||
@@ -1054,7 +1077,7 @@ func (m *IngressConfig) DeleteHttp2Rpc(clusterNamespacedName util.ClusterNamespa
|
||||
}
|
||||
m.mutex.Unlock()
|
||||
if hit {
|
||||
IngressLog.Infof("Http2Rpc triggerd deleted event executed %s", clusterNamespacedName.Name)
|
||||
IngressLog.Infof("Http2Rpc triggered deleted event executed %s", clusterNamespacedName.Name)
|
||||
push := func(kind config.GroupVersionKind) {
|
||||
m.XDSUpdater.ConfigUpdate(&model.PushRequest{
|
||||
Full: true,
|
||||
@@ -1160,13 +1183,13 @@ func (m *IngressConfig) constructHttp2RpcEnvoyFilter(http2rpcConfig *annotations
|
||||
IngressLog.Infof("Found http2rpc mappings %v", mappings)
|
||||
if _, exist := mappings[http2rpcConfig.Name]; !exist {
|
||||
IngressLog.Errorf("Http2RpcConfig name %s, not found Http2Rpc CRD", http2rpcConfig.Name)
|
||||
return nil, errors.New("invalid http2rpcConfig has no useable http2rpc")
|
||||
return nil, errors.New("invalid http2rpcConfig has no usable http2rpc")
|
||||
}
|
||||
http2rpcCRD := mappings[http2rpcConfig.Name]
|
||||
|
||||
if http2rpcCRD.GetDubbo() == nil {
|
||||
IngressLog.Errorf("Http2RpcConfig name %s, only support Http2Rpc CRD Dubbo Service type", http2rpcConfig.Name)
|
||||
return nil, errors.New("invalid http2rpcConfig has no useable http2rpc")
|
||||
return nil, errors.New("invalid http2rpcConfig has no usable http2rpc")
|
||||
}
|
||||
|
||||
httpRoute := route.HTTPRoute
|
||||
@@ -1293,7 +1316,7 @@ func (m *IngressConfig) constructHttp2RpcMethods(dubbo *higressv1.DubboService)
|
||||
var method = make(map[string]interface{})
|
||||
method["name"] = serviceMethod.GetServiceMethod()
|
||||
var params []interface{}
|
||||
// paramFromEntireBody is for methods with single parameter. So when paramFromEntireBody exists, we just ignore parmas.
|
||||
// paramFromEntireBody is for methods with single parameter. So when paramFromEntireBody exists, we just ignore params.
|
||||
var paramFromEntireBody = serviceMethod.GetParamFromEntireBody()
|
||||
if paramFromEntireBody != nil {
|
||||
var param = make(map[string]interface{})
|
||||
|
||||
@@ -433,6 +433,11 @@ func (c *controller) ConvertGateway(convertOptions *common.ConvertOptions, wrapp
|
||||
// If there is no matching secret, try to get it from configmap.
|
||||
secretName = httpsCredentialConfig.MatchSecretNameByDomain(rule.Host)
|
||||
secretNamespace = c.options.SystemNamespace
|
||||
namespace, secret := cert.ParseTLSSecret(secretName)
|
||||
if namespace != "" {
|
||||
secretNamespace = namespace
|
||||
secretName = secret
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -441,6 +446,11 @@ func (c *controller) ConvertGateway(convertOptions *common.ConvertOptions, wrapp
|
||||
if httpsCredentialConfig != nil {
|
||||
secretName = httpsCredentialConfig.MatchSecretNameByDomain(rule.Host)
|
||||
secretNamespace = c.options.SystemNamespace
|
||||
namespace, secret := cert.ParseTLSSecret(secretName)
|
||||
if namespace != "" {
|
||||
secretNamespace = namespace
|
||||
secretName = secret
|
||||
}
|
||||
}
|
||||
}
|
||||
if secretName == "" {
|
||||
|
||||
@@ -419,6 +419,11 @@ func (c *controller) ConvertGateway(convertOptions *common.ConvertOptions, wrapp
|
||||
// If there is no matching secret, try to get it from configmap.
|
||||
secretName = httpsCredentialConfig.MatchSecretNameByDomain(rule.Host)
|
||||
secretNamespace = c.options.SystemNamespace
|
||||
namespace, secret := cert.ParseTLSSecret(secretName)
|
||||
if namespace != "" {
|
||||
secretNamespace = namespace
|
||||
secretName = secret
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -427,6 +432,11 @@ func (c *controller) ConvertGateway(convertOptions *common.ConvertOptions, wrapp
|
||||
if httpsCredentialConfig != nil {
|
||||
secretName = httpsCredentialConfig.MatchSecretNameByDomain(rule.Host)
|
||||
secretNamespace = c.options.SystemNamespace
|
||||
namespace, secret := cert.ParseTLSSecret(secretName)
|
||||
if namespace != "" {
|
||||
secretNamespace = namespace
|
||||
secretName = secret
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ GO_VERSION ?= 1.19
|
||||
TINYGO_VERSION ?= 0.28.1
|
||||
ORAS_VERSION ?= 1.0.0
|
||||
HIGRESS_VERSION ?= 1.0.0-rc
|
||||
USE_HIGRESS_TINYGO ?= true
|
||||
USE_HIGRESS_TINYGO ?= false
|
||||
BUILDER ?= ${BUILDER_REGISTRY}wasm-go-builder:go${GO_VERSION}-tinygo${TINYGO_VERSION}-oras${ORAS_VERSION}
|
||||
BUILD_TIME := $(shell date "+%Y%m%d-%H%M%S")
|
||||
COMMIT_ID := $(shell git rev-parse --short HEAD 2>/dev/null)
|
||||
|
||||
1
plugins/wasm-go/extensions/ai-cache/.buildrc
Normal file
1
plugins/wasm-go/extensions/ai-cache/.buildrc
Normal file
@@ -0,0 +1 @@
|
||||
EXTRA_TAGS=proxy_wasm_version_0_2_100
|
||||
@@ -32,3 +32,15 @@ redis:
|
||||
serviceName: my-redis.dns
|
||||
timeout: 2000
|
||||
```
|
||||
|
||||
## 进阶用法
|
||||
|
||||
当前默认的缓存 key 是基于 GJSON PATH 的表达式:`messages.@reverse.0.content` 提取,含义是把 messages 数组反转后取第一项的 content;
|
||||
|
||||
GJSON PATH 支持条件判断语法,例如希望取最后一个 role 为 user 的 content 作为 key,可以写成: `messages.@reverse.#(role=="user").content`;
|
||||
|
||||
如果希望将所有 role 为 user 的 content 拼成一个数组作为 key,可以写成:`messages.@reverse.#(role=="user")#.content`;
|
||||
|
||||
还可以支持管道语法,例如希望取到数第二个 role 为 user 的 content 作为 key,可以写成:`messages.@reverse.#(role=="user")#.content|1`。
|
||||
|
||||
更多用法可以参考[官方文档](https://github.com/tidwall/gjson/blob/master/SYNTAX.md),可以使用 [GJSON Playground](https://gjson.dev/) 进行语法测试。
|
||||
|
||||
@@ -8,7 +8,7 @@ replace github.com/alibaba/higress/plugins/wasm-go => ../..
|
||||
|
||||
require (
|
||||
github.com/alibaba/higress/plugins/wasm-go v1.3.6-0.20240528060522-53bccf89f441
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240327114451-d6b7174a84fc
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f
|
||||
github.com/tidwall/gjson v1.14.3
|
||||
github.com/tidwall/resp v0.1.1
|
||||
github.com/tidwall/sjson v1.2.5
|
||||
|
||||
@@ -5,6 +5,7 @@ github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520 h1:IHDghbG
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520/go.mod h1:Nz8ORLaFiLWotg6GeKlJMhv8cci8mM43uEnLA5t8iew=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240327114451-d6b7174a84fc h1:t2AT8zb6N/59Y78lyRWedVoVWHNRSCBh0oWCC+bluTQ=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240327114451-d6b7174a84fc/go.mod h1:hNFjhrLUIq+kJ9bOcs8QtiplSQ61GZXtd2xHKx4BYRo=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f/go.mod h1:hNFjhrLUIq+kJ9bOcs8QtiplSQ61GZXtd2xHKx4BYRo=
|
||||
github.com/magefile/mage v1.14.0 h1:6QDX3g6z1YvJ4olPhT1wksUcSa/V0a1B+pJb73fBjyo=
|
||||
github.com/magefile/mage v1.14.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
|
||||
@@ -222,9 +222,9 @@ func onHttpRequestBody(ctx wrapper.HttpContext, config PluginConfig, body []byte
|
||||
log.Debugf("cache hit, key:%s", key)
|
||||
ctx.SetContext(CacheKeyContextKey, nil)
|
||||
if !stream {
|
||||
proxywasm.SendHttpResponse(200, [][2]string{{"content-type", "application/json; charset=utf-8"}}, []byte(fmt.Sprintf(config.ReturnResponseTemplate, response.String())), -1)
|
||||
proxywasm.SendHttpResponseWithDetail(200, "ai-cache.hit", [][2]string{{"content-type", "application/json; charset=utf-8"}}, []byte(fmt.Sprintf(config.ReturnResponseTemplate, response.String())), -1)
|
||||
} else {
|
||||
proxywasm.SendHttpResponse(200, [][2]string{{"content-type", "text/event-stream; charset=utf-8"}}, []byte(fmt.Sprintf(config.ReturnStreamResponseTemplate, response.String())), -1)
|
||||
proxywasm.SendHttpResponseWithDetail(200, "ai-cache.hit", [][2]string{{"content-type", "text/event-stream; charset=utf-8"}}, []byte(fmt.Sprintf(config.ReturnStreamResponseTemplate, response.String())), -1)
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
|
||||
3
plugins/wasm-go/extensions/ai-prompt-decorator/.gitignore
vendored
Normal file
3
plugins/wasm-go/extensions/ai-prompt-decorator/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
config.yaml
|
||||
main.wasm
|
||||
tmp/
|
||||
69
plugins/wasm-go/extensions/ai-prompt-decorator/README.md
Normal file
69
plugins/wasm-go/extensions/ai-prompt-decorator/README.md
Normal file
@@ -0,0 +1,69 @@
|
||||
# 简介
|
||||
AI提示词装饰器插件,支持在LLM的请求前后插入prompt。
|
||||
|
||||
# 配置说明
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|----------------|-----------------|------|-----|----------------------------------|
|
||||
| `prepend` | array of message object | optional | - | 在初始输入之前插入的语句 |
|
||||
| `append` | array of message object | optional | - | 在初始输入之后插入的语句 |
|
||||
|
||||
message object 配置说明:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|----------------|-----------------|------|-----|----------------------------------|
|
||||
| `role` | string | 必填 | - | 角色 |
|
||||
| `content` | string | 必填 | - | 消息 |
|
||||
|
||||
# 示例
|
||||
|
||||
配置示例如下:
|
||||
|
||||
```yaml
|
||||
prepend:
|
||||
- role: system
|
||||
content: "请使用英语回答问题"
|
||||
append:
|
||||
- role: user
|
||||
content: "每次回答完问题,尝试进行反问"
|
||||
```
|
||||
|
||||
使用以上配置发起请求:
|
||||
|
||||
```bash
|
||||
curl http://localhost/test \
|
||||
-H "content-type: application/json" \
|
||||
-d '{
|
||||
"model": "gpt-3.5-turbo",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你是谁?"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
经过插件处理后,实际请求为:
|
||||
|
||||
```bash
|
||||
curl http://localhost/test \
|
||||
-H "content-type: application/json" \
|
||||
-d '{
|
||||
"model": "gpt-3.5-turbo",
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "请使用英语回答问题"
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你是谁?"
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "每次回答完问题,尝试进行反问"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
21
plugins/wasm-go/extensions/ai-prompt-decorator/go.mod
Normal file
21
plugins/wasm-go/extensions/ai-prompt-decorator/go.mod
Normal file
@@ -0,0 +1,21 @@
|
||||
module ai-prompt-decorator
|
||||
|
||||
go 1.18
|
||||
|
||||
replace github.com/alibaba/higress/plugins/wasm-go => ../..
|
||||
|
||||
require (
|
||||
github.com/alibaba/higress/plugins/wasm-go v1.3.5
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f
|
||||
github.com/tidwall/gjson v1.14.3
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/google/uuid v1.3.0 // indirect
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520 // indirect
|
||||
github.com/magefile/mage v1.14.0 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.0 // indirect
|
||||
github.com/tidwall/resp v0.1.1 // indirect
|
||||
github.com/tidwall/sjson v1.2.5
|
||||
)
|
||||
23
plugins/wasm-go/extensions/ai-prompt-decorator/go.sum
Normal file
23
plugins/wasm-go/extensions/ai-prompt-decorator/go.sum
Normal file
@@ -0,0 +1,23 @@
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520 h1:IHDghbGQ2DTIXHBHxWfqCYQW1fKjyJ/I7W1pMyUDeEA=
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520/go.mod h1:Nz8ORLaFiLWotg6GeKlJMhv8cci8mM43uEnLA5t8iew=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f h1:ZIiIBRvIw62gA5MJhuwp1+2wWbqL9IGElQ499rUsYYg=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f/go.mod h1:hNFjhrLUIq+kJ9bOcs8QtiplSQ61GZXtd2xHKx4BYRo=
|
||||
github.com/magefile/mage v1.14.0 h1:6QDX3g6z1YvJ4olPhT1wksUcSa/V0a1B+pJb73fBjyo=
|
||||
github.com/magefile/mage v1.14.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/gjson v1.14.3 h1:9jvXn7olKEHU1S9vwoMGliaT8jq1vJ7IH/n9zD9Dnlw=
|
||||
github.com/tidwall/gjson v1.14.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/resp v0.1.1 h1:Ly20wkhqKTmDUPlyM1S7pWo5kk0tDu8OoC/vFArXmwE=
|
||||
github.com/tidwall/resp v0.1.1/go.mod h1:3/FrruOBAxPTPtundW0VXgmsQ4ZBA0Aw714lVYgwFa0=
|
||||
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
|
||||
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
80
plugins/wasm-go/extensions/ai-prompt-decorator/main.go
Normal file
80
plugins/wasm-go/extensions/ai-prompt-decorator/main.go
Normal file
@@ -0,0 +1,80 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
"github.com/tidwall/gjson"
|
||||
"github.com/tidwall/sjson"
|
||||
)
|
||||
|
||||
func main() {
|
||||
wrapper.SetCtx(
|
||||
"ai-prompt-decorator",
|
||||
wrapper.ParseConfigBy(parseConfig),
|
||||
wrapper.ProcessRequestHeadersBy(onHttpRequestHeaders),
|
||||
wrapper.ProcessRequestBodyBy(onHttpRequestBody),
|
||||
)
|
||||
}
|
||||
|
||||
type Message struct {
|
||||
Role string `json:"role"`
|
||||
Content string `json:"content"`
|
||||
}
|
||||
|
||||
type AIPromptDecoratorConfig struct {
|
||||
Prepend []Message `json:"prepend"`
|
||||
Append []Message `json:"append"`
|
||||
}
|
||||
|
||||
func parseConfig(jsonConfig gjson.Result, config *AIPromptDecoratorConfig, log wrapper.Log) error {
|
||||
return json.Unmarshal([]byte(jsonConfig.Raw), config)
|
||||
}
|
||||
|
||||
func onHttpRequestHeaders(ctx wrapper.HttpContext, config AIPromptDecoratorConfig, log wrapper.Log) types.Action {
|
||||
proxywasm.RemoveHttpRequestHeader("content-length")
|
||||
return types.ActionContinue
|
||||
}
|
||||
|
||||
func onHttpRequestBody(ctx wrapper.HttpContext, config AIPromptDecoratorConfig, body []byte, log wrapper.Log) types.Action {
|
||||
messageJson := `{"messages":[]}`
|
||||
|
||||
for _, entry := range config.Prepend {
|
||||
msg, err := json.Marshal(entry)
|
||||
if err != nil {
|
||||
log.Errorf("Failed to add prepend message, error: %v", err)
|
||||
return types.ActionContinue
|
||||
}
|
||||
messageJson, _ = sjson.SetRaw(messageJson, "messages.-1", string(msg))
|
||||
}
|
||||
|
||||
rawMessage := gjson.GetBytes(body, "messages")
|
||||
if !rawMessage.Exists() {
|
||||
log.Errorf("Cannot find messages field in request body")
|
||||
return types.ActionContinue
|
||||
}
|
||||
for _, entry := range rawMessage.Array() {
|
||||
messageJson, _ = sjson.SetRaw(messageJson, "messages.-1", entry.Raw)
|
||||
}
|
||||
|
||||
for _, entry := range config.Append {
|
||||
msg, err := json.Marshal(entry)
|
||||
if err != nil {
|
||||
log.Errorf("Failed to add prepend message, error: %v", err)
|
||||
return types.ActionContinue
|
||||
}
|
||||
messageJson, _ = sjson.SetRaw(messageJson, "messages.-1", string(msg))
|
||||
}
|
||||
|
||||
newbody, err := sjson.SetRaw(string(body), "messages", gjson.Get(messageJson, "messages").Raw)
|
||||
if err != nil {
|
||||
log.Error("modify body failed")
|
||||
}
|
||||
if err = proxywasm.ReplaceHttpRequestBody([]byte(newbody)); err != nil {
|
||||
log.Error("rewrite body failed")
|
||||
}
|
||||
|
||||
return types.ActionContinue
|
||||
}
|
||||
3
plugins/wasm-go/extensions/ai-prompt-template/.gitignore
vendored
Normal file
3
plugins/wasm-go/extensions/ai-prompt-template/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
config.yaml
|
||||
main.wasm
|
||||
tmp/
|
||||
48
plugins/wasm-go/extensions/ai-prompt-template/README.md
Normal file
48
plugins/wasm-go/extensions/ai-prompt-template/README.md
Normal file
@@ -0,0 +1,48 @@
|
||||
# 简介
|
||||
AI提示词模板,用于快速构建同类型的AI请求。
|
||||
|
||||
# 配置说明
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|----------------|-----------------|------|-----|----------------------------------|
|
||||
| `templates` | array of object | 必填 | - | 模板设置 |
|
||||
|
||||
template object 配置说明:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|----------------|-----------------|------|-----|----------------------------------|
|
||||
| `name` | string | 必填 | - | 模板名称 |
|
||||
| `template.model` | string | 必填 | - | 模型名称 |
|
||||
| `template.messages` | array of object | 必填 | - | 大模型输入 |
|
||||
|
||||
message object 配置说明:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|----------------|-----------------|------|-----|----------------------------------|
|
||||
| `role` | string | 必填 | - | 角色 |
|
||||
| `content` | string | 必填 | - | 消息 |
|
||||
|
||||
配置示例如下:
|
||||
|
||||
```yaml
|
||||
templates:
|
||||
- name: "developer-chat"
|
||||
template:
|
||||
model: gpt-3.5-turbo
|
||||
messages:
|
||||
- role: system
|
||||
content: "You are a {{program}} expert, in {{language}} programming language."
|
||||
- role: user
|
||||
content: "Write me a {{program}} program."
|
||||
```
|
||||
|
||||
使用以上配置的请求body示例:
|
||||
|
||||
```json
|
||||
{
|
||||
"template": "developer-chat",
|
||||
"properties": {
|
||||
"program": "quick sort",
|
||||
"language": "python"
|
||||
}
|
||||
}
|
||||
```
|
||||
21
plugins/wasm-go/extensions/ai-prompt-template/go.mod
Normal file
21
plugins/wasm-go/extensions/ai-prompt-template/go.mod
Normal file
@@ -0,0 +1,21 @@
|
||||
module ai-prompt-template
|
||||
|
||||
go 1.18
|
||||
|
||||
replace github.com/alibaba/higress/plugins/wasm-go => ../..
|
||||
|
||||
require (
|
||||
github.com/alibaba/higress/plugins/wasm-go v1.3.5
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f
|
||||
github.com/tidwall/gjson v1.14.3
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/google/uuid v1.3.0 // indirect
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520 // indirect
|
||||
github.com/magefile/mage v1.14.0 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.0 // indirect
|
||||
github.com/tidwall/resp v0.1.1 // indirect
|
||||
github.com/tidwall/sjson v1.2.5
|
||||
)
|
||||
26
plugins/wasm-go/extensions/ai-prompt-template/go.sum
Normal file
26
plugins/wasm-go/extensions/ai-prompt-template/go.sum
Normal file
@@ -0,0 +1,26 @@
|
||||
github.com/alibaba/higress/plugins/wasm-go v1.3.5 h1:VOLL3m442IHCSu8mR5AZ4sc6LVT9X0w1hdqDI7oB9jY=
|
||||
github.com/alibaba/higress/plugins/wasm-go v1.3.5/go.mod h1:kr3V9Ntbspj1eSrX8rgjBsdMXkGupYEf+LM72caGPQc=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520 h1:IHDghbGQ2DTIXHBHxWfqCYQW1fKjyJ/I7W1pMyUDeEA=
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520/go.mod h1:Nz8ORLaFiLWotg6GeKlJMhv8cci8mM43uEnLA5t8iew=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240226064518-b3dc4646a35a h1:luYRvxLTE1xYxrXYj7nmjd1U0HHh8pUPiKfdZ0MhCGE=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240226064518-b3dc4646a35a/go.mod h1:hNFjhrLUIq+kJ9bOcs8QtiplSQ61GZXtd2xHKx4BYRo=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f/go.mod h1:hNFjhrLUIq+kJ9bOcs8QtiplSQ61GZXtd2xHKx4BYRo=
|
||||
github.com/magefile/mage v1.14.0 h1:6QDX3g6z1YvJ4olPhT1wksUcSa/V0a1B+pJb73fBjyo=
|
||||
github.com/magefile/mage v1.14.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/gjson v1.14.3 h1:9jvXn7olKEHU1S9vwoMGliaT8jq1vJ7IH/n9zD9Dnlw=
|
||||
github.com/tidwall/gjson v1.14.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/resp v0.1.1 h1:Ly20wkhqKTmDUPlyM1S7pWo5kk0tDu8OoC/vFArXmwE=
|
||||
github.com/tidwall/resp v0.1.1/go.mod h1:3/FrruOBAxPTPtundW0VXgmsQ4ZBA0Aw714lVYgwFa0=
|
||||
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
|
||||
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
55
plugins/wasm-go/extensions/ai-prompt-template/main.go
Normal file
55
plugins/wasm-go/extensions/ai-prompt-template/main.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
func main() {
|
||||
wrapper.SetCtx(
|
||||
"ai-prompt-template",
|
||||
wrapper.ParseConfigBy(parseConfig),
|
||||
wrapper.ProcessRequestHeadersBy(onHttpRequestHeaders),
|
||||
wrapper.ProcessRequestBodyBy(onHttpRequestBody),
|
||||
)
|
||||
}
|
||||
|
||||
type AIPromptTemplateConfig struct {
|
||||
templates map[string]string
|
||||
}
|
||||
|
||||
func parseConfig(json gjson.Result, config *AIPromptTemplateConfig, log wrapper.Log) error {
|
||||
config.templates = make(map[string]string)
|
||||
for _, v := range json.Get("templates").Array() {
|
||||
config.templates[v.Get("name").String()] = v.Get("template").Raw
|
||||
log.Info(v.Get("template").Raw)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func onHttpRequestHeaders(ctx wrapper.HttpContext, config AIPromptTemplateConfig, log wrapper.Log) types.Action {
|
||||
templateEnable, _ := proxywasm.GetHttpRequestHeader("template-enable")
|
||||
if templateEnable != "true" {
|
||||
ctx.DontReadRequestBody()
|
||||
return types.ActionContinue
|
||||
}
|
||||
proxywasm.RemoveHttpRequestHeader("content-length")
|
||||
return types.ActionContinue
|
||||
}
|
||||
|
||||
func onHttpRequestBody(ctx wrapper.HttpContext, config AIPromptTemplateConfig, body []byte, log wrapper.Log) types.Action {
|
||||
if gjson.GetBytes(body, "template").Exists() && gjson.GetBytes(body, "properties").Exists() {
|
||||
name := gjson.GetBytes(body, "template").String()
|
||||
template := config.templates[name]
|
||||
for key, value := range gjson.GetBytes(body, "properties").Map() {
|
||||
template = strings.ReplaceAll(template, fmt.Sprintf("{{%s}}", key), value.String())
|
||||
}
|
||||
proxywasm.ReplaceHttpRequestBody([]byte(template))
|
||||
}
|
||||
return types.ActionContinue
|
||||
}
|
||||
1
plugins/wasm-go/extensions/ai-proxy/.buildrc
Normal file
1
plugins/wasm-go/extensions/ai-proxy/.buildrc
Normal file
@@ -0,0 +1 @@
|
||||
EXTRA_TAGS=proxy_wasm_version_0_2_100
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: AI 代理
|
||||
keywords: [ higress,ai,proxy,rag ]
|
||||
keywords: [ AI网关, AI代理 ]
|
||||
description: AI 代理插件配置参考
|
||||
---
|
||||
|
||||
@@ -9,6 +9,13 @@ description: AI 代理插件配置参考
|
||||
`AI 代理`插件实现了基于 OpenAI API 契约的 AI 代理功能。目前支持 OpenAI、Azure OpenAI、月之暗面(Moonshot)和通义千问等 AI
|
||||
服务提供商。
|
||||
|
||||
> **注意:**
|
||||
|
||||
> 请求路径后缀匹配 `/v1/chat/completions` 时,对应文生文场景,会用 OpenAI 的文生文协议解析请求 Body,再转换为对应 LLM 厂商的文生文协议
|
||||
|
||||
> 请求路径后缀匹配 `/v1/embeddings` 时,对应文本向量场景,会用 OpenAI 的文本向量协议解析请求 Body,再转换为对应 LLM 厂商的文本向量协议
|
||||
|
||||
|
||||
## 配置字段
|
||||
|
||||
### 基本配置
|
||||
@@ -19,14 +26,14 @@ description: AI 代理插件配置参考
|
||||
|
||||
`provider`的配置字段说明如下:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|----------------|-----------------|------|-----|----------------------------------------------------------------------------------|
|
||||
| `type` | string | 必填 | - | AI 服务提供商名称。目前支持以下取值:openai, azure, moonshot, qwen, zhipuai |
|
||||
| `apiTokens` | array of string | 必填 | - | 用于在访问 AI 服务时进行认证的令牌。如果配置了多个 token,插件会在请求时随机进行选择。部分服务提供商只支持配置一个 token。 |
|
||||
| `timeout` | number | 非必填 | - | 访问 AI 服务的超时时间。单位为毫秒。默认值为 120000,即 2 分钟 |
|
||||
| `modelMapping` | map of string | 非必填 | - | AI 模型映射表,用于将请求中的模型名称映射为服务提供商支持模型名称。<br/>可以使用 "*" 为键来配置通用兜底映射关系 |
|
||||
| `protocol` | string | 非必填 | - | 插件对外提供的 API 接口契约。目前支持以下取值:openai(默认值,使用 OpenAI 的接口契约)、original(使用目标服务提供商的原始接口契约) |
|
||||
| `context` | object | 非必填 | - | 配置 AI 对话上下文信息 |
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
| -------------- | --------------- | -------- | ------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `type` | string | 必填 | - | AI 服务提供商名称 |
|
||||
| `apiTokens` | array of string | 非必填 | - | 用于在访问 AI 服务时进行认证的令牌。如果配置了多个 token,插件会在请求时随机进行选择。部分服务提供商只支持配置一个 token。 |
|
||||
| `timeout` | number | 非必填 | - | 访问 AI 服务的超时时间。单位为毫秒。默认值为 120000,即 2 分钟 |
|
||||
| `modelMapping` | map of string | 非必填 | - | AI 模型映射表,用于将请求中的模型名称映射为服务提供商支持模型名称。<br/>1. 支持前缀匹配。例如用 "gpt-3-*" 匹配所有名称以“gpt-3-”开头的模型;<br/>2. 支持使用 "*" 为键来配置通用兜底映射关系;<br/>3. 如果映射的目标名称为空字符串 "",则表示保留原模型名称。 |
|
||||
| `protocol` | string | 非必填 | - | 插件对外提供的 API 接口契约。目前支持以下取值:openai(默认值,使用 OpenAI 的接口契约)、original(使用目标服务提供商的原始接口契约) |
|
||||
| `context` | object | 非必填 | - | 配置 AI 对话上下文信息 |
|
||||
|
||||
`context`的配置字段说明如下:
|
||||
|
||||
@@ -40,7 +47,12 @@ description: AI 代理插件配置参考
|
||||
|
||||
#### OpenAI
|
||||
|
||||
OpenAI 所对应的 `type` 为 `openai`。它并无特有的配置字段。
|
||||
OpenAI 所对应的 `type` 为 `openai`。它特有的配置字段如下:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|-------------------|----------|----------|--------|-------------------------------------------------------------------------------|
|
||||
| `openaiCustomUrl` | string | 非必填 | - | 基于OpenAI协议的自定义后端URL,例如: www.example.com/myai/v1/chat/completions |
|
||||
|
||||
|
||||
#### Azure OpenAI
|
||||
|
||||
@@ -89,13 +101,25 @@ DeepSeek所对应的 `type` 为 `deepseek`。它并无特有的配置字段。
|
||||
|
||||
Groq 所对应的 `type` 为 `groq`。它并无特有的配置字段。
|
||||
|
||||
#### 文心一言(Baidu)
|
||||
|
||||
文心一言所对应的 `type` 为 `baidu`。它并无特有的配置字段。
|
||||
|
||||
#### MiniMax
|
||||
|
||||
MiniMax所对应的 `type` 为 `minimax`。它特有的配置字段如下:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
| ---------------- | -------- | ------------------------------------------------------------ | ------ | ------------------------------------------------------------ |
|
||||
| `minimaxGroupId` | string | 当使用`abab6.5-chat`, `abab6.5s-chat`, `abab5.5s-chat`, `abab5.5-chat`四种模型时必填 | - | 当使用`abab6.5-chat`, `abab6.5s-chat`, `abab5.5s-chat`, `abab5.5-chat`四种模型时会使用ChatCompletion Pro,需要设置groupID |
|
||||
|
||||
#### Anthropic Claude
|
||||
|
||||
Anthropic Claude 所对应的 `type` 为 `claude`。它特有的配置字段如下:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|-----------|--------|-----|-----|-------------------|
|
||||
| `version` | string | 必填 | - | Claude 服务的 API 版本 |
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|-----------|--------|------|-----|----------------------------------|
|
||||
| `claudeVersion` | string | 可选 | - | Claude 服务的 API 版本,默认为 2023-06-01 |
|
||||
|
||||
#### Ollama
|
||||
|
||||
@@ -106,6 +130,41 @@ Ollama 所对应的 `type` 为 `ollama`。它特有的配置字段如下:
|
||||
| `ollamaServerHost` | string | 必填 | - | Ollama 服务器的主机地址 |
|
||||
| `ollamaServerPort` | number | 必填 | - | Ollama 服务器的端口号,默认为11434 |
|
||||
|
||||
#### 混元
|
||||
|
||||
混元所对应的 `type` 为 `hunyuan`。它特有的配置字段如下:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|-------------------|--------|------|-----|----------------------------------------------|
|
||||
| `hunyuanAuthId` | string | 必填 | - | 混元用于v3版本认证的id |
|
||||
| `hunyuanAuthKey` | string | 必填 | - | 混元用于v3版本认证的key |
|
||||
|
||||
#### 阶跃星辰 (Stepfun)
|
||||
|
||||
阶跃星辰所对应的 `type` 为 `stepfun`。它并无特有的配置字段。
|
||||
|
||||
#### Cloudflare Workers AI
|
||||
|
||||
Cloudflare Workers AI 所对应的 `type` 为 `cloudflare`。它特有的配置字段如下:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
|-------------------|--------|------|-----|----------------------------------------------------------------------------------------------------------------------------|
|
||||
| `cloudflareAccountId` | string | 必填 | - | [Cloudflare Account ID](https://developers.cloudflare.com/workers-ai/get-started/rest-api/#1-get-api-token-and-account-id) |
|
||||
|
||||
#### 星火 (Spark)
|
||||
|
||||
星火所对应的 `type` 为 `spark`。它并无特有的配置字段。
|
||||
|
||||
讯飞星火认知大模型的`apiTokens`字段值为`APIKey:APISecret`。即填入自己的APIKey与APISecret,并以`:`分隔。
|
||||
|
||||
#### Gemini
|
||||
|
||||
Gemini 所对应的 `type` 为 `gemini`。它特有的配置字段如下:
|
||||
|
||||
| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 |
|
||||
| --------------------- | -------- | -------- |-----|-------------------------------------------------------------------------------------------------|
|
||||
| `geminiSafetySetting` | map of string | 非必填 | - | Gemini AI内容过滤和安全级别设定。参考[Safety settings](https://ai.google.dev/gemini-api/docs/safety-settings) |
|
||||
|
||||
## 用法示例
|
||||
|
||||
### 使用 OpenAI 协议代理 Azure OpenAI 服务
|
||||
@@ -221,25 +280,72 @@ provider:
|
||||
'gpt-3': "qwen-turbo"
|
||||
'gpt-35-turbo': "qwen-plus"
|
||||
'gpt-4-turbo': "qwen-max"
|
||||
'gpt-4-*': "qwen-max"
|
||||
'text-embedding-v1': 'text-embedding-v1'
|
||||
'*': "qwen-turbo"
|
||||
```
|
||||
|
||||
**AI 对话请求示例**
|
||||
|
||||
URL: http://your-domain/v1/chat/completions
|
||||
|
||||
请求体:
|
||||
|
||||
```json
|
||||
{
|
||||
"model": "text-embedding-v1",
|
||||
"input": "Hello"
|
||||
}
|
||||
```
|
||||
|
||||
响应体示例:
|
||||
|
||||
```json
|
||||
{
|
||||
"object": "list",
|
||||
"data": [
|
||||
{
|
||||
"object": "embedding",
|
||||
"index": 0,
|
||||
"embedding": [
|
||||
-1.0437825918197632,
|
||||
5.208984375,
|
||||
3.0483806133270264,
|
||||
-1.7897135019302368,
|
||||
-2.0107421875,
|
||||
...,
|
||||
0.8125,
|
||||
-1.1759847402572632,
|
||||
0.8174641728401184,
|
||||
1.0432943105697632,
|
||||
-0.5885213017463684
|
||||
]
|
||||
}
|
||||
],
|
||||
"model": "text-embedding-v1",
|
||||
"usage": {
|
||||
"prompt_tokens": 1,
|
||||
"total_tokens": 1
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**请求示例**
|
||||
|
||||
URL: http://your-domain/v1/embeddings
|
||||
|
||||
示例请求内容:
|
||||
|
||||
```json
|
||||
{
|
||||
"model": "gpt-3",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你好,你是谁?"
|
||||
}
|
||||
],
|
||||
"temperature": 0.3
|
||||
"model": "text-embedding-v1",
|
||||
"input": [
|
||||
"Hello world!"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**响应示例**
|
||||
示例响应内容:
|
||||
|
||||
```json
|
||||
{
|
||||
@@ -507,6 +613,7 @@ provider:
|
||||
type: claude
|
||||
apiTokens:
|
||||
- "YOUR_CLAUDE_API_TOKEN"
|
||||
version: "2023-06-01"
|
||||
```
|
||||
|
||||
**请求示例**
|
||||
@@ -528,27 +635,383 @@ provider:
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "msg_01K8iLH18FGN7Xd9deurwtoD",
|
||||
"type": "message",
|
||||
"role": "assistant",
|
||||
"model": "claude-3-opus-20240229",
|
||||
"stop_sequence": null,
|
||||
"usage": {
|
||||
"input_tokens": 16,
|
||||
"output_tokens": 141
|
||||
},
|
||||
"content": [
|
||||
"id": "msg_01Jt3GzyjuzymnxmZERJguLK",
|
||||
"choices": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "你好!我是Claude,一个由Anthropic公司开发的人工智能助手。我的任务是尽我所能帮助人类,比如回答问题,提供建议和意见,协助完成任务等。我掌握了很多知识,也具备一定的分析和推理能力,但我不是人类,也没有实体的身体。很高兴认识你!如果有什么需要帮助的地方,欢迎随时告诉我。"
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "您好,我是一个由人工智能公司Anthropic开发的聊天助手。我的名字叫Claude,是一个聪明友善、知识渊博的对话系统。很高兴认识您!我可以就各种话题与您聊天,回答问题,提供建议和帮助。我会尽最大努力给您有帮助的回复。希望我们能有个愉快的交流!"
|
||||
},
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"stop_reason": "end_turn"
|
||||
"created": 1717385918,
|
||||
"model": "claude-3-opus-20240229",
|
||||
"object": "chat.completion",
|
||||
"usage": {
|
||||
"prompt_tokens": 16,
|
||||
"completion_tokens": 126,
|
||||
"total_tokens": 142
|
||||
}
|
||||
}
|
||||
```
|
||||
### 使用 OpenAI 协议代理混元服务
|
||||
|
||||
**配置信息**
|
||||
|
||||
```yaml
|
||||
provider:
|
||||
type: "hunyuan"
|
||||
hunyuanAuthKey: "<YOUR AUTH KEY>"
|
||||
apiTokens:
|
||||
- ""
|
||||
hunyuanAuthId: "<YOUR AUTH ID>"
|
||||
timeout: 1200000
|
||||
modelMapping:
|
||||
"*": "hunyuan-lite"
|
||||
```
|
||||
|
||||
**请求示例**
|
||||
请求脚本:
|
||||
```sh
|
||||
|
||||
curl --location 'http://<your higress domain>/v1/chat/completions' \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data '{
|
||||
"model": "gpt-3",
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "你是一个名专业的开发人员!"
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你好,你是谁?"
|
||||
}
|
||||
],
|
||||
"temperature": 0.3,
|
||||
"stream": false
|
||||
}'
|
||||
```
|
||||
|
||||
**响应示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "fd140c3e-0b69-4b19-849b-d354d32a6162",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"delta": {
|
||||
"role": "assistant",
|
||||
"content": "你好!我是一名专业的开发人员。"
|
||||
},
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"created": 1717493117,
|
||||
"model": "hunyuan-lite",
|
||||
"object": "chat.completion",
|
||||
"usage": {
|
||||
"prompt_tokens": 15,
|
||||
"completion_tokens": 9,
|
||||
"total_tokens": 24
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 使用 OpenAI 协议代理百度文心一言服务
|
||||
|
||||
**配置信息**
|
||||
|
||||
```yaml
|
||||
provider:
|
||||
type: baidu
|
||||
apiTokens:
|
||||
- "YOUR_BAIDU_API_TOKEN"
|
||||
modelMapping:
|
||||
'gpt-3': "ERNIE-4.0"
|
||||
'*': "ERNIE-4.0"
|
||||
```
|
||||
|
||||
**请求示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"model": "gpt-4-turbo",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你好,你是谁?"
|
||||
}
|
||||
],
|
||||
"stream": false
|
||||
}
|
||||
```
|
||||
|
||||
**响应示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "as-e90yfg1pk1",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "你好,我是文心一言,英文名是ERNIE Bot。我能够与人对话互动,回答问题,协助创作,高效便捷地帮助人们获取信息、知识和灵感。"
|
||||
},
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"created": 1717251488,
|
||||
"model": "ERNIE-4.0",
|
||||
"object": "chat.completion",
|
||||
"usage": {
|
||||
"prompt_tokens": 4,
|
||||
"completion_tokens": 33,
|
||||
"total_tokens": 37
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 使用 OpenAI 协议代理MiniMax服务
|
||||
|
||||
**配置信息**
|
||||
|
||||
```yaml
|
||||
provider:
|
||||
type: minimax
|
||||
apiTokens:
|
||||
- "YOUR_MINIMAX_API_TOKEN"
|
||||
modelMapping:
|
||||
"gpt-3": "abab6.5g-chat"
|
||||
"gpt-4": "abab6.5-chat"
|
||||
"*": "abab6.5g-chat"
|
||||
minimaxGroupId: "YOUR_MINIMAX_GROUP_ID"
|
||||
```
|
||||
|
||||
**请求示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"model": "gpt-4-turbo",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你好,你是谁?"
|
||||
}
|
||||
],
|
||||
"stream": false
|
||||
}
|
||||
```
|
||||
|
||||
**响应示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "02b2251f8c6c09d68c1743f07c72afd7",
|
||||
"choices": [
|
||||
{
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"message": {
|
||||
"content": "你好!我是MM智能助理,一款由MiniMax自研的大型语言模型。我可以帮助你解答问题,提供信息,进行对话等。有什么可以帮助你的吗?",
|
||||
"role": "assistant"
|
||||
}
|
||||
}
|
||||
],
|
||||
"created": 1717760544,
|
||||
"model": "abab6.5s-chat",
|
||||
"object": "chat.completion",
|
||||
"usage": {
|
||||
"total_tokens": 106
|
||||
},
|
||||
"input_sensitive": false,
|
||||
"output_sensitive": false,
|
||||
"input_sensitive_type": 0,
|
||||
"output_sensitive_type": 0,
|
||||
"base_resp": {
|
||||
"status_code": 0,
|
||||
"status_msg": ""
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 使用 OpenAI 协议代理 Cloudflare Workers AI 服务
|
||||
|
||||
**配置信息**
|
||||
|
||||
```yaml
|
||||
provider:
|
||||
type: cloudflare
|
||||
apiTokens:
|
||||
- "YOUR_WORKERS_AI_API_TOKEN"
|
||||
cloudflareAccountId: "YOUR_CLOUDFLARE_ACCOUNT_ID"
|
||||
modelMapping:
|
||||
"*": "@cf/meta/llama-3-8b-instruct"
|
||||
```
|
||||
|
||||
**请求示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"model": "gpt-3.5",
|
||||
"max_tokens": 1024,
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Who are you?"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**响应示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "id-1720367803430",
|
||||
"object": "chat.completion",
|
||||
"created": 1720367803,
|
||||
"model": "@cf/meta/llama-3-8b-instruct",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational manner. I'm not a human, but a computer program designed to simulate conversation and answer questions to the best of my knowledge. I can be used to generate text on a wide range of topics, from science and history to entertainment and culture.\n\nI'm a large language model, which means I've been trained on a massive dataset of text from the internet and can generate human-like responses. I can understand natural language and respond accordingly, making me suitable for tasks such as:\n\n* Answering questions on various topics\n* Generating text based on a given prompt\n* Translating text from one language to another\n* Summarizing long pieces of text\n* Creating chatbot dialogues\n\nI'm constantly learning and improving, so the more conversations I have with users like you, the better I'll become."
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### 使用 OpenAI 协议代理Spark服务
|
||||
|
||||
**配置信息**
|
||||
|
||||
```yaml
|
||||
provider:
|
||||
type: spark
|
||||
apiTokens:
|
||||
- "APIKey:APISecret"
|
||||
modelMapping:
|
||||
"gpt-4o": "generalv3.5"
|
||||
"gpt-4": "generalv3"
|
||||
"*": "general"
|
||||
```
|
||||
|
||||
**请求示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "你是一名专业的开发人员!"
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你好,你是谁?"
|
||||
}
|
||||
],
|
||||
"stream": false
|
||||
}
|
||||
```
|
||||
|
||||
**响应示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "cha000c23c6@dx190ef0b4b96b8f2532",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "你好!我是一名专业的开发人员,擅长编程和解决技术问题。有什么我可以帮助你的吗?"
|
||||
}
|
||||
}
|
||||
],
|
||||
"created": 1721997415,
|
||||
"model": "generalv3.5",
|
||||
"object": "chat.completion",
|
||||
"usage": {
|
||||
"prompt_tokens": 10,
|
||||
"completion_tokens": 19,
|
||||
"total_tokens": 29
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 使用 OpenAI 协议代理 gemini 服务
|
||||
|
||||
**配置信息**
|
||||
|
||||
```yaml
|
||||
provider:
|
||||
type: gemini
|
||||
apiTokens:
|
||||
- "YOUR_GEMINI_API_TOKEN"
|
||||
modelMapping:
|
||||
"*": "gemini-pro"
|
||||
geminiSafetySetting:
|
||||
"HARM_CATEGORY_SEXUALLY_EXPLICIT" :"BLOCK_NONE"
|
||||
"HARM_CATEGORY_HATE_SPEECH" :"BLOCK_NONE"
|
||||
"HARM_CATEGORY_HARASSMENT" :"BLOCK_NONE"
|
||||
"HARM_CATEGORY_DANGEROUS_CONTENT" :"BLOCK_NONE"
|
||||
```
|
||||
|
||||
**请求示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"model": "gpt-3.5",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Who are you?"
|
||||
}
|
||||
],
|
||||
"stream": false
|
||||
}
|
||||
```
|
||||
|
||||
**响应示例**
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "chatcmpl-b010867c-0d3f-40ba-95fd-4e8030551aeb",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "I am a large multi-modal model, trained by Google. I am designed to provide information and answer questions to the best of my abilities."
|
||||
},
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"created": 1722756984,
|
||||
"model": "gemini-pro",
|
||||
"object": "chat.completion",
|
||||
"usage": {
|
||||
"prompt_tokens": 5,
|
||||
"completion_tokens": 29,
|
||||
"total_tokens": 34
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 完整配置示例
|
||||
|
||||
### Kubernetes 示例
|
||||
|
||||
以下以使用 OpenAI 协议代理 Groq 服务为例,展示完整的插件配置示例。
|
||||
|
||||
```yaml
|
||||
@@ -619,4 +1082,131 @@ curl "http://<YOUR-DOMAIN>/v1/chat/completions" -H "Content-Type: application/js
|
||||
}
|
||||
]
|
||||
}'
|
||||
```
|
||||
```
|
||||
|
||||
### Docker-Compose 示例
|
||||
|
||||
`docker-compose.yml` 配置文件:
|
||||
|
||||
```yaml
|
||||
version: '3.7'
|
||||
services:
|
||||
envoy:
|
||||
image: higress-registry.cn-hangzhou.cr.aliyuncs.com/higress/envoy:1.20
|
||||
entrypoint: /usr/local/bin/envoy
|
||||
# 开启了 debug 级别日志方便调试
|
||||
command: -c /etc/envoy/envoy.yaml --component-log-level wasm:debug
|
||||
networks:
|
||||
- higress-net
|
||||
ports:
|
||||
- "10000:10000"
|
||||
volumes:
|
||||
- ./envoy.yaml:/etc/envoy/envoy.yaml
|
||||
- ./plugin.wasm:/etc/envoy/plugin.wasm
|
||||
networks:
|
||||
higress-net: {}
|
||||
```
|
||||
|
||||
`envoy.yaml` 配置文件:
|
||||
|
||||
```yaml
|
||||
admin:
|
||||
address:
|
||||
socket_address:
|
||||
protocol: TCP
|
||||
address: 0.0.0.0
|
||||
port_value: 9901
|
||||
static_resources:
|
||||
listeners:
|
||||
- name: listener_0
|
||||
address:
|
||||
socket_address:
|
||||
protocol: TCP
|
||||
address: 0.0.0.0
|
||||
port_value: 10000
|
||||
filter_chains:
|
||||
- filters:
|
||||
- name: envoy.filters.network.http_connection_manager
|
||||
typed_config:
|
||||
"@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
|
||||
scheme_header_transformation:
|
||||
scheme_to_overwrite: https
|
||||
stat_prefix: ingress_http
|
||||
# Output envoy logs to stdout
|
||||
access_log:
|
||||
- name: envoy.access_loggers.stdout
|
||||
typed_config:
|
||||
"@type": type.googleapis.com/envoy.extensions.access_loggers.stream.v3.StdoutAccessLog
|
||||
# Modify as required
|
||||
route_config:
|
||||
name: local_route
|
||||
virtual_hosts:
|
||||
- name: local_service
|
||||
domains: [ "*" ]
|
||||
routes:
|
||||
- match:
|
||||
prefix: "/"
|
||||
route:
|
||||
cluster: claude
|
||||
timeout: 300s
|
||||
http_filters:
|
||||
- name: claude
|
||||
typed_config:
|
||||
"@type": type.googleapis.com/udpa.type.v1.TypedStruct
|
||||
type_url: type.googleapis.com/envoy.extensions.filters.http.wasm.v3.Wasm
|
||||
value:
|
||||
config:
|
||||
name: claude
|
||||
vm_config:
|
||||
runtime: envoy.wasm.runtime.v8
|
||||
code:
|
||||
local:
|
||||
filename: /etc/envoy/plugin.wasm
|
||||
configuration:
|
||||
"@type": "type.googleapis.com/google.protobuf.StringValue"
|
||||
value: | # 插件配置
|
||||
{
|
||||
"provider": {
|
||||
"type": "claude",
|
||||
"apiTokens": [
|
||||
"YOUR_API_TOKEN"
|
||||
]
|
||||
}
|
||||
}
|
||||
- name: envoy.filters.http.router
|
||||
clusters:
|
||||
- name: claude
|
||||
connect_timeout: 30s
|
||||
type: LOGICAL_DNS
|
||||
dns_lookup_family: V4_ONLY
|
||||
lb_policy: ROUND_ROBIN
|
||||
load_assignment:
|
||||
cluster_name: claude
|
||||
endpoints:
|
||||
- lb_endpoints:
|
||||
- endpoint:
|
||||
address:
|
||||
socket_address:
|
||||
address: api.anthropic.com # API 服务地址
|
||||
port_value: 443
|
||||
transport_socket:
|
||||
name: envoy.transport_sockets.tls
|
||||
typed_config:
|
||||
"@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext
|
||||
"sni": "api.anthropic.com"
|
||||
```
|
||||
|
||||
访问示例:
|
||||
|
||||
```bash
|
||||
curl "http://localhost:10000/v1/chat/completions" -H "Content-Type: application/json" -d '{
|
||||
"model": "claude-3-opus-20240229",
|
||||
"max_tokens": 1024,
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你好,你是谁?"
|
||||
}
|
||||
]
|
||||
}'
|
||||
```
|
||||
|
||||
@@ -13,6 +13,52 @@ DOCKER_BUILDKIT=1; docker build --build-arg PLUGIN_NAME=ai-proxy --build-arg EXT
|
||||
```powershell
|
||||
$env:DOCKER_BUILDKIT=1; docker build --build-arg PLUGIN_NAME=ai-proxy --build-arg EXTRA_TAGS=proxy_wasm_version_0_2_100 --build-arg BUILDER=higress-registry.cn-hangzhou.cr.aliyuncs.com/plugins/wasm-go-builder:go1.19-tinygo0.28.1-oras1.0.0 -t ai-proxy:0.0.1 --output .\out ..\..
|
||||
```
|
||||
|
||||
## 本地运行
|
||||
参考:https://higress.io/zh-cn/docs/user/wasm-go
|
||||
需要注意的是,higress/plugins/wasm-go/extensions/ai-proxy/envoy.yaml中的clusters字段,记得改成你需要地址,比如混元的话:就会有如下的一个cluster的配置:
|
||||
```yaml
|
||||
<省略>
|
||||
static_resources:
|
||||
<省略>
|
||||
clusters:
|
||||
load_assignment:
|
||||
cluster_name: moonshot
|
||||
endpoints:
|
||||
- lb_endpoints:
|
||||
- endpoint:
|
||||
address:
|
||||
socket_address:
|
||||
address: hunyuan.tencentcloudapi.com
|
||||
port_value: 443
|
||||
transport_socket:
|
||||
name: envoy.transport_sockets.tls
|
||||
typed_config:
|
||||
"@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext
|
||||
"sni": "hunyuan.tencentcloudapi.com"
|
||||
```
|
||||
|
||||
而后你就可以在本地的pod中查看相应的输出,请求样例如下:
|
||||
```sh
|
||||
curl --location 'http://127.0.0.1:10000/v1/chat/completions' \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data '{
|
||||
"model": "gpt-3",
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "你是一个名专业的开发人员!"
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "你好,你是谁?"
|
||||
}
|
||||
],
|
||||
"temperature": 0.3,
|
||||
"stream": false
|
||||
}'
|
||||
```
|
||||
|
||||
## 测试须知
|
||||
|
||||
由于 `ai-proxy` 插件使用了 Higress 对数据面定制的特殊功能,因此在测试时需要使用版本不低于 1.4.0-rc.1 的 Higress Gateway 镜像。
|
||||
@@ -8,7 +8,7 @@ replace github.com/alibaba/higress/plugins/wasm-go => ../..
|
||||
|
||||
require (
|
||||
github.com/alibaba/higress/plugins/wasm-go v0.0.0
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240327114451-d6b7174a84fc
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f
|
||||
github.com/stretchr/testify v1.8.4
|
||||
github.com/tidwall/gjson v1.14.3
|
||||
)
|
||||
|
||||
@@ -4,8 +4,8 @@ github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520 h1:IHDghbGQ2DTIXHBHxWfqCYQW1fKjyJ/I7W1pMyUDeEA=
|
||||
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520/go.mod h1:Nz8ORLaFiLWotg6GeKlJMhv8cci8mM43uEnLA5t8iew=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240327114451-d6b7174a84fc h1:t2AT8zb6N/59Y78lyRWedVoVWHNRSCBh0oWCC+bluTQ=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240327114451-d6b7174a84fc/go.mod h1:hNFjhrLUIq+kJ9bOcs8QtiplSQ61GZXtd2xHKx4BYRo=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f h1:ZIiIBRvIw62gA5MJhuwp1+2wWbqL9IGElQ499rUsYYg=
|
||||
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f/go.mod h1:hNFjhrLUIq+kJ9bOcs8QtiplSQ61GZXtd2xHKx4BYRo=
|
||||
github.com/magefile/mage v1.14.0 h1:6QDX3g6z1YvJ4olPhT1wksUcSa/V0a1B+pJb73fBjyo=
|
||||
github.com/magefile/mage v1.14.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
|
||||
@@ -21,6 +21,8 @@ const (
|
||||
pluginName = "ai-proxy"
|
||||
|
||||
ctxKeyApiName = "apiKey"
|
||||
|
||||
defaultMaxBodyBytes uint32 = 10 * 1024 * 1024
|
||||
)
|
||||
|
||||
func main() {
|
||||
@@ -61,10 +63,10 @@ func onHttpRequestHeader(ctx wrapper.HttpContext, pluginConfig config.PluginConf
|
||||
|
||||
rawPath := ctx.Path()
|
||||
path, _ := url.Parse(rawPath)
|
||||
apiName := getApiName(path.Path)
|
||||
apiName := getOpenAiApiName(path.Path)
|
||||
if apiName == "" {
|
||||
log.Debugf("[onHttpRequestHeader] unsupported path: %s", path.Path)
|
||||
_ = util.SendResponse(404, util.MimeTypeTextPlain, "API not found: "+path.Path)
|
||||
_ = util.SendResponse(404, "ai-proxy.unknown_api", util.MimeTypeTextPlain, "API not found: "+path.Path)
|
||||
return types.ActionContinue
|
||||
}
|
||||
ctx.SetContext(ctxKeyApiName, apiName)
|
||||
@@ -75,16 +77,18 @@ func onHttpRequestHeader(ctx wrapper.HttpContext, pluginConfig config.PluginConf
|
||||
|
||||
action, err := handler.OnRequestHeaders(ctx, apiName, log)
|
||||
if err == nil {
|
||||
if contentType, err := proxywasm.GetHttpRequestHeader("Content-Type"); err == nil && contentType != "" {
|
||||
ctx.SetRequestBodyBufferLimit(defaultMaxBodyBytes)
|
||||
// Always return types.HeaderStopIteration to support fallback routing,
|
||||
// as long as onHttpRequestBody can be called.
|
||||
return types.HeaderStopIteration
|
||||
}
|
||||
return action
|
||||
}
|
||||
_ = util.SendResponse(404, util.MimeTypeTextPlain, fmt.Sprintf("failed to process request headers: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.proc_req_headers_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to process request headers: %v", err))
|
||||
return types.ActionContinue
|
||||
}
|
||||
|
||||
if _, needHandleBody := activeProvider.(provider.RequestBodyHandler); needHandleBody {
|
||||
ctx.DontReadRequestBody()
|
||||
}
|
||||
|
||||
return types.ActionContinue
|
||||
}
|
||||
|
||||
@@ -99,18 +103,24 @@ func onHttpRequestBody(ctx wrapper.HttpContext, pluginConfig config.PluginConfig
|
||||
log.Debugf("[onHttpRequestBody] provider=%s", activeProvider.GetProviderType())
|
||||
|
||||
if handler, ok := activeProvider.(provider.RequestBodyHandler); ok {
|
||||
apiName := ctx.GetContext(ctxKeyApiName).(provider.ApiName)
|
||||
apiName, _ := ctx.GetContext(ctxKeyApiName).(provider.ApiName)
|
||||
action, err := handler.OnRequestBody(ctx, apiName, body, log)
|
||||
if err == nil {
|
||||
return action
|
||||
}
|
||||
_ = util.SendResponse(404, util.MimeTypeTextPlain, fmt.Sprintf("failed to process request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.proc_req_body_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to process request body: %v", err))
|
||||
return types.ActionContinue
|
||||
}
|
||||
return types.ActionContinue
|
||||
}
|
||||
|
||||
func onHttpResponseHeaders(ctx wrapper.HttpContext, pluginConfig config.PluginConfig, log wrapper.Log) types.Action {
|
||||
if !wrapper.IsResponseFromUpstream() {
|
||||
// Response is not coming from the upstream. Let it pass through.
|
||||
ctx.DontReadResponseBody()
|
||||
return types.ActionContinue
|
||||
}
|
||||
|
||||
activeProvider := pluginConfig.GetProvider()
|
||||
|
||||
if activeProvider == nil {
|
||||
@@ -139,12 +149,12 @@ func onHttpResponseHeaders(ctx wrapper.HttpContext, pluginConfig config.PluginCo
|
||||
}
|
||||
|
||||
if handler, ok := activeProvider.(provider.ResponseHeadersHandler); ok {
|
||||
apiName := ctx.GetContext(ctxKeyApiName).(provider.ApiName)
|
||||
apiName, _ := ctx.GetContext(ctxKeyApiName).(provider.ApiName)
|
||||
action, err := handler.OnResponseHeaders(ctx, apiName, log)
|
||||
if err == nil {
|
||||
return action
|
||||
}
|
||||
_ = util.SendResponse(404, util.MimeTypeTextPlain, fmt.Sprintf("failed to process response headers: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.proc_resp_headers_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to process response headers: %v", err))
|
||||
return types.ActionContinue
|
||||
}
|
||||
|
||||
@@ -171,7 +181,7 @@ func onStreamingResponseBody(ctx wrapper.HttpContext, pluginConfig config.Plugin
|
||||
log.Debugf("isLastChunk=%v chunk: %s", isLastChunk, string(chunk))
|
||||
|
||||
if handler, ok := activeProvider.(provider.StreamingResponseBodyHandler); ok {
|
||||
apiName := ctx.GetContext(ctxKeyApiName).(provider.ApiName)
|
||||
apiName, _ := ctx.GetContext(ctxKeyApiName).(provider.ApiName)
|
||||
modifiedChunk, err := handler.OnStreamingResponseBody(ctx, apiName, chunk, isLastChunk, log)
|
||||
if err == nil && modifiedChunk != nil {
|
||||
return modifiedChunk
|
||||
@@ -193,20 +203,23 @@ func onHttpResponseBody(ctx wrapper.HttpContext, pluginConfig config.PluginConfi
|
||||
//log.Debugf("response body: %s", string(body))
|
||||
|
||||
if handler, ok := activeProvider.(provider.ResponseBodyHandler); ok {
|
||||
apiName := ctx.GetContext(ctxKeyApiName).(provider.ApiName)
|
||||
apiName, _ := ctx.GetContext(ctxKeyApiName).(provider.ApiName)
|
||||
action, err := handler.OnResponseBody(ctx, apiName, body, log)
|
||||
if err == nil {
|
||||
return action
|
||||
}
|
||||
_ = util.SendResponse(404, util.MimeTypeTextPlain, fmt.Sprintf("failed to process response body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.proc_resp_body_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to process response body: %v", err))
|
||||
return types.ActionContinue
|
||||
}
|
||||
return types.ActionContinue
|
||||
}
|
||||
|
||||
func getApiName(path string) provider.ApiName {
|
||||
func getOpenAiApiName(path string) provider.ApiName {
|
||||
if strings.HasSuffix(path, "/v1/chat/completions") {
|
||||
return provider.ApiNameChatCompletion
|
||||
}
|
||||
if strings.HasSuffix(path, "/v1/embeddings") {
|
||||
return provider.ApiNameEmbeddings
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
@@ -23,6 +23,9 @@ func (m *azureProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if _, err := url.Parse(config.azureServiceUrl); err != nil {
|
||||
return fmt.Errorf("invalid azureServiceUrl: %w", err)
|
||||
}
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -52,44 +55,43 @@ func (m *azureProvider) GetProviderType() string {
|
||||
}
|
||||
|
||||
func (m *azureProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
_ = util.OverwriteRequestPath(m.serviceUrl.RequestURI())
|
||||
_ = util.OverwriteRequestHost(m.serviceUrl.Host)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("api-key", m.config.apiTokens[0])
|
||||
|
||||
if m.contextCache == nil {
|
||||
ctx.DontReadRequestBody()
|
||||
} else {
|
||||
if apiName == ApiNameChatCompletion {
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
} else {
|
||||
ctx.DontReadRequestBody()
|
||||
}
|
||||
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (m *azureProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
if m.contextCache == nil {
|
||||
// We don't need to process the request body for other APIs.
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
if m.contextCache == nil {
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.openai.set_include_usage_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.azure.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.azure.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
@@ -20,6 +21,9 @@ type baichuanProviderInitializer struct {
|
||||
}
|
||||
|
||||
func (m *baichuanProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -45,14 +49,8 @@ func (m *baichuanProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName Api
|
||||
}
|
||||
_ = util.OverwriteRequestPath(baichuanChatCompletionPath)
|
||||
_ = util.OverwriteRequestHost(baichuanDomain)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Authorization", "Bearer "+m.config.GetRandomToken())
|
||||
|
||||
if m.contextCache == nil {
|
||||
ctx.DontReadRequestBody()
|
||||
} else {
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
}
|
||||
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + m.config.GetRandomToken())
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
@@ -73,11 +71,11 @@ func (m *baichuanProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiNam
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.baichuan.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.baichuan.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
|
||||
341
plugins/wasm-go/extensions/ai-proxy/provider/baidu.go
Normal file
341
plugins/wasm-go/extensions/ai-proxy/provider/baidu.go
Normal file
@@ -0,0 +1,341 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
)
|
||||
|
||||
// baiduProvider is the provider for baidu ernie bot service.
|
||||
|
||||
const (
|
||||
baiduDomain = "aip.baidubce.com"
|
||||
)
|
||||
|
||||
var baiduModelToPathSuffixMap = map[string]string{
|
||||
"ERNIE-4.0-8K": "completions_pro",
|
||||
"ERNIE-3.5-8K": "completions",
|
||||
"ERNIE-3.5-128K": "ernie-3.5-128k",
|
||||
"ERNIE-Speed-8K": "ernie_speed",
|
||||
"ERNIE-Speed-128K": "ernie-speed-128k",
|
||||
"ERNIE-Tiny-8K": "ernie-tiny-8k",
|
||||
"ERNIE-Bot-8K": "ernie_bot_8k",
|
||||
"BLOOMZ-7B": "bloomz_7b1",
|
||||
}
|
||||
|
||||
type baiduProviderInitializer struct {
|
||||
}
|
||||
|
||||
func (b *baiduProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *baiduProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
|
||||
return &baiduProvider{
|
||||
config: config,
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
|
||||
type baiduProvider struct {
|
||||
config ProviderConfig
|
||||
contextCache *contextCache
|
||||
}
|
||||
|
||||
func (b *baiduProvider) GetProviderType() string {
|
||||
return providerTypeBaidu
|
||||
}
|
||||
|
||||
func (b *baiduProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
_ = util.OverwriteRequestHost(baiduDomain)
|
||||
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Accept-Encoding")
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
|
||||
// Delay the header processing to allow changing streaming mode in OnRequestBody
|
||||
return types.HeaderStopIteration, nil
|
||||
}
|
||||
|
||||
func (b *baiduProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
// 使用文心一言接口协议
|
||||
if b.config.protocol == protocolOriginal {
|
||||
request := &baiduTextGenRequest{}
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
if request.Model == "" {
|
||||
return types.ActionContinue, errors.New("request model is empty")
|
||||
}
|
||||
// 根据模型重写requestPath
|
||||
path := b.getRequestPath(request.Model)
|
||||
_ = util.OverwriteRequestPath(path)
|
||||
|
||||
if b.config.context == nil {
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
err := b.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.baidu.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
b.setSystemContent(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.baidu.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
// 映射模型重写requestPath
|
||||
model := request.Model
|
||||
if model == "" {
|
||||
return types.ActionContinue, errors.New("missing model in chat completion request")
|
||||
}
|
||||
ctx.SetContext(ctxKeyOriginalRequestModel, model)
|
||||
mappedModel := getMappedModel(model, b.config.modelMapping, log)
|
||||
if mappedModel == "" {
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
request.Model = mappedModel
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, request.Model)
|
||||
path := b.getRequestPath(mappedModel)
|
||||
_ = util.OverwriteRequestPath(path)
|
||||
|
||||
if b.config.context == nil {
|
||||
baiduRequest := b.baiduTextGenRequest(request)
|
||||
return types.ActionContinue, replaceJsonRequestBody(baiduRequest, log)
|
||||
}
|
||||
|
||||
err := b.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.baidu.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
baiduRequest := b.baiduTextGenRequest(request)
|
||||
if err := replaceJsonRequestBody(baiduRequest, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.baidu.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace Request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
func (b *baiduProvider) OnResponseHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
// 使用文心一言接口协议,跳过OnStreamingResponseBody()和OnResponseBody()
|
||||
if b.config.protocol == protocolOriginal {
|
||||
ctx.DontReadResponseBody()
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
_ = proxywasm.RemoveHttpResponseHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (b *baiduProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name ApiName, chunk []byte, isLastChunk bool, log wrapper.Log) ([]byte, error) {
|
||||
if isLastChunk || len(chunk) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
// sample event response:
|
||||
// data: {"id":"as-vb0m37ti8y","object":"chat.completion","created":1709089502,"sentence_id":0,"is_end":false,"is_truncated":false,"result":"当然可以,","need_clear_history":false,"finish_reason":"normal","usage":{"prompt_tokens":5,"completion_tokens":2,"total_tokens":7}}
|
||||
|
||||
// sample end event response:
|
||||
// data: {"id":"as-vb0m37ti8y","object":"chat.completion","created":1709089531,"sentence_id":20,"is_end":true,"is_truncated":false,"result":"","need_clear_history":false,"finish_reason":"normal","usage":{"prompt_tokens":5,"completion_tokens":420,"total_tokens":425}}
|
||||
responseBuilder := &strings.Builder{}
|
||||
lines := strings.Split(string(chunk), "\n")
|
||||
for _, data := range lines {
|
||||
if len(data) < 6 {
|
||||
// ignore blank line or wrong format
|
||||
continue
|
||||
}
|
||||
data = data[6:]
|
||||
var baiduResponse baiduTextGenStreamResponse
|
||||
if err := json.Unmarshal([]byte(data), &baiduResponse); err != nil {
|
||||
log.Errorf("unable to unmarshal baidu response: %v", err)
|
||||
continue
|
||||
}
|
||||
response := b.streamResponseBaidu2OpenAI(ctx, &baiduResponse)
|
||||
responseBody, err := json.Marshal(response)
|
||||
if err != nil {
|
||||
log.Errorf("unable to marshal response: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
b.appendResponse(responseBuilder, string(responseBody))
|
||||
}
|
||||
modifiedResponseChunk := responseBuilder.String()
|
||||
log.Debugf("=== modified response chunk: %s", modifiedResponseChunk)
|
||||
return []byte(modifiedResponseChunk), nil
|
||||
}
|
||||
|
||||
func (b *baiduProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
baiduResponse := &baiduTextGenResponse{}
|
||||
if err := json.Unmarshal(body, baiduResponse); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal baidu response: %v", err)
|
||||
}
|
||||
if baiduResponse.ErrorMsg != "" {
|
||||
return types.ActionContinue, fmt.Errorf("baidu response error, error_code: %d, error_message: %s", baiduResponse.ErrorCode, baiduResponse.ErrorMsg)
|
||||
}
|
||||
response := b.responseBaidu2OpenAI(ctx, baiduResponse)
|
||||
return types.ActionContinue, replaceJsonResponseBody(response, log)
|
||||
}
|
||||
|
||||
type baiduTextGenRequest struct {
|
||||
Model string `json:"model"`
|
||||
Messages []chatMessage `json:"messages"`
|
||||
Temperature float64 `json:"temperature,omitempty"`
|
||||
TopP float64 `json:"top_p,omitempty"`
|
||||
PenaltyScore float64 `json:"penalty_score,omitempty"`
|
||||
Stream bool `json:"stream,omitempty"`
|
||||
System string `json:"system,omitempty"`
|
||||
DisableSearch bool `json:"disable_search,omitempty"`
|
||||
EnableCitation bool `json:"enable_citation,omitempty"`
|
||||
MaxOutputTokens int `json:"max_output_tokens,omitempty"`
|
||||
UserId string `json:"user_id,omitempty"`
|
||||
}
|
||||
|
||||
func (b *baiduProvider) getRequestPath(baiduModel string) string {
|
||||
// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/clntwmv7t
|
||||
suffix, ok := baiduModelToPathSuffixMap[baiduModel]
|
||||
if !ok {
|
||||
suffix = baiduModel
|
||||
}
|
||||
return fmt.Sprintf("/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/%s?access_token=%s", suffix, b.config.GetRandomToken())
|
||||
}
|
||||
|
||||
func (b *baiduProvider) setSystemContent(request *baiduTextGenRequest, content string) {
|
||||
request.System = content
|
||||
}
|
||||
|
||||
func (b *baiduProvider) baiduTextGenRequest(request *chatCompletionRequest) *baiduTextGenRequest {
|
||||
baiduRequest := baiduTextGenRequest{
|
||||
Messages: make([]chatMessage, 0, len(request.Messages)),
|
||||
Temperature: request.Temperature,
|
||||
TopP: request.TopP,
|
||||
PenaltyScore: request.FrequencyPenalty,
|
||||
Stream: request.Stream,
|
||||
DisableSearch: false,
|
||||
EnableCitation: false,
|
||||
MaxOutputTokens: request.MaxTokens,
|
||||
UserId: request.User,
|
||||
}
|
||||
for _, message := range request.Messages {
|
||||
if message.Role == roleSystem {
|
||||
baiduRequest.System = message.Content
|
||||
} else {
|
||||
baiduRequest.Messages = append(baiduRequest.Messages, chatMessage{
|
||||
Role: message.Role,
|
||||
Content: message.Content,
|
||||
})
|
||||
}
|
||||
}
|
||||
return &baiduRequest
|
||||
}
|
||||
|
||||
type baiduTextGenResponse struct {
|
||||
Id string `json:"id"`
|
||||
Object string `json:"object"`
|
||||
Created int64 `json:"created"`
|
||||
Result string `json:"result"`
|
||||
IsTruncated bool `json:"is_truncated"`
|
||||
NeedClearHistory bool `json:"need_clear_history"`
|
||||
Usage baiduTextGenResponseUsage `json:"usage"`
|
||||
baiduTextGenResponseError
|
||||
}
|
||||
|
||||
type baiduTextGenResponseError struct {
|
||||
ErrorCode int `json:"error_code"`
|
||||
ErrorMsg string `json:"error_msg"`
|
||||
}
|
||||
|
||||
type baiduTextGenStreamResponse struct {
|
||||
baiduTextGenResponse
|
||||
SentenceId int `json:"sentence_id"`
|
||||
IsEnd bool `json:"is_end"`
|
||||
}
|
||||
|
||||
type baiduTextGenResponseUsage struct {
|
||||
PromptTokens int `json:"prompt_tokens"`
|
||||
CompletionTokens int `json:"completion_tokens"`
|
||||
TotalTokens int `json:"total_tokens"`
|
||||
}
|
||||
|
||||
func (b *baiduProvider) responseBaidu2OpenAI(ctx wrapper.HttpContext, response *baiduTextGenResponse) *chatCompletionResponse {
|
||||
choice := chatCompletionChoice{
|
||||
Index: 0,
|
||||
Message: &chatMessage{Role: roleAssistant, Content: response.Result},
|
||||
FinishReason: finishReasonStop,
|
||||
}
|
||||
return &chatCompletionResponse{
|
||||
Id: response.Id,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
SystemFingerprint: "",
|
||||
Object: objectChatCompletion,
|
||||
Choices: []chatCompletionChoice{choice},
|
||||
Usage: usage{
|
||||
PromptTokens: response.Usage.PromptTokens,
|
||||
CompletionTokens: response.Usage.CompletionTokens,
|
||||
TotalTokens: response.Usage.TotalTokens,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (b *baiduProvider) streamResponseBaidu2OpenAI(ctx wrapper.HttpContext, response *baiduTextGenStreamResponse) *chatCompletionResponse {
|
||||
choice := chatCompletionChoice{
|
||||
Index: 0,
|
||||
Message: &chatMessage{Role: roleAssistant, Content: response.Result},
|
||||
}
|
||||
if response.IsEnd {
|
||||
choice.FinishReason = finishReasonStop
|
||||
}
|
||||
return &chatCompletionResponse{
|
||||
Id: response.Id,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
SystemFingerprint: "",
|
||||
Object: objectChatCompletionChunk,
|
||||
Choices: []chatCompletionChoice{choice},
|
||||
Usage: usage{
|
||||
PromptTokens: response.Usage.PromptTokens,
|
||||
CompletionTokens: response.Usage.CompletionTokens,
|
||||
TotalTokens: response.Usage.TotalTokens,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (b *baiduProvider) appendResponse(responseBuilder *strings.Builder, responseBody string) {
|
||||
responseBuilder.WriteString(fmt.Sprintf("%s %s\n\n", streamDataItemKey, responseBody))
|
||||
}
|
||||
371
plugins/wasm-go/extensions/ai-proxy/provider/claude.go
Normal file
371
plugins/wasm-go/extensions/ai-proxy/provider/claude.go
Normal file
@@ -0,0 +1,371 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
)
|
||||
|
||||
// claudeProvider is the provider for Claude service.
|
||||
const (
|
||||
claudeDomain = "api.anthropic.com"
|
||||
claudeChatCompletionPath = "/v1/messages"
|
||||
defaultVersion = "2023-06-01"
|
||||
defaultMaxTokens = 4096
|
||||
)
|
||||
|
||||
type claudeProviderInitializer struct{}
|
||||
|
||||
type claudeTextGenRequest struct {
|
||||
Model string `json:"model"`
|
||||
Messages []chatMessage `json:"messages"`
|
||||
System string `json:"system,omitempty"`
|
||||
MaxTokens int `json:"max_tokens,omitempty"`
|
||||
StopSequences []string `json:"stop_sequences,omitempty"`
|
||||
Stream bool `json:"stream,omitempty"`
|
||||
Temperature float64 `json:"temperature,omitempty"`
|
||||
TopP float64 `json:"top_p,omitempty"`
|
||||
TopK int `json:"top_k,omitempty"`
|
||||
}
|
||||
|
||||
type claudeTextGenResponse struct {
|
||||
Id string `json:"id"`
|
||||
Type string `json:"type"`
|
||||
Role string `json:"role"`
|
||||
Content []claudeTextGenContent `json:"content"`
|
||||
Model string `json:"model"`
|
||||
StopReason *string `json:"stop_reason"`
|
||||
StopSequence *string `json:"stop_sequence"`
|
||||
Usage claudeTextGenUsage `json:"usage"`
|
||||
Error *claudeTextGenError `json:"error"`
|
||||
}
|
||||
|
||||
type claudeTextGenContent struct {
|
||||
Type string `json:"type"`
|
||||
Text string `json:"text,omitempty"`
|
||||
}
|
||||
|
||||
type claudeTextGenUsage struct {
|
||||
InputTokens int `json:"input_tokens"`
|
||||
OutputTokens int `json:"output_tokens"`
|
||||
}
|
||||
|
||||
type claudeTextGenError struct {
|
||||
Type string `json:"type"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
type claudeTextGenStreamResponse struct {
|
||||
Type string `json:"type"`
|
||||
Message claudeTextGenResponse `json:"message"`
|
||||
Index int `json:"index"`
|
||||
ContentBlock *claudeTextGenContent `json:"content_block"`
|
||||
Delta *claudeTextGenDelta `json:"delta"`
|
||||
Usage claudeTextGenUsage `json:"usage"`
|
||||
}
|
||||
|
||||
type claudeTextGenDelta struct {
|
||||
Type string `json:"type"`
|
||||
Text string `json:"text"`
|
||||
StopReason *string `json:"stop_reason"`
|
||||
StopSequence *string `json:"stop_sequence"`
|
||||
}
|
||||
|
||||
func (c *claudeProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *claudeProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
|
||||
return &claudeProvider{
|
||||
config: config,
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
|
||||
type claudeProvider struct {
|
||||
config ProviderConfig
|
||||
contextCache *contextCache
|
||||
}
|
||||
|
||||
func (c *claudeProvider) GetProviderType() string {
|
||||
return providerTypeClaude
|
||||
}
|
||||
|
||||
func (c *claudeProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
_ = util.OverwriteRequestPath(claudeChatCompletionPath)
|
||||
_ = util.OverwriteRequestHost(claudeDomain)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("x-api-key", c.config.GetRandomToken())
|
||||
|
||||
if c.config.claudeVersion == "" {
|
||||
c.config.claudeVersion = defaultVersion
|
||||
}
|
||||
_ = proxywasm.AddHttpRequestHeader("anthropic-version", c.config.claudeVersion)
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Accept-Encoding")
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (c *claudeProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
// use original protocol
|
||||
if c.config.protocol == protocolOriginal {
|
||||
if c.config.context == nil {
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
request := &claudeTextGenRequest{}
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
|
||||
err := c.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.claude.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.claude.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
// use openai protocol
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
model := request.Model
|
||||
if model == "" {
|
||||
return types.ActionContinue, errors.New("missing model in chat completion request")
|
||||
}
|
||||
ctx.SetContext(ctxKeyOriginalRequestModel, model)
|
||||
mappedModel := getMappedModel(model, c.config.modelMapping, log)
|
||||
if mappedModel == "" {
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
request.Model = mappedModel
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, request.Model)
|
||||
|
||||
streaming := request.Stream
|
||||
if streaming {
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Accept", "text/event-stream")
|
||||
}
|
||||
|
||||
if c.config.context == nil {
|
||||
claudeRequest := c.buildClaudeTextGenRequest(request)
|
||||
return types.ActionContinue, replaceJsonRequestBody(claudeRequest, log)
|
||||
}
|
||||
|
||||
err := c.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.claude.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
claudeRequest := c.buildClaudeTextGenRequest(request)
|
||||
if err := replaceJsonRequestBody(claudeRequest, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.claude.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
func (c *claudeProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
claudeResponse := &claudeTextGenResponse{}
|
||||
if err := json.Unmarshal(body, claudeResponse); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal claude response: %v", err)
|
||||
}
|
||||
if claudeResponse.Error != nil {
|
||||
return types.ActionContinue, fmt.Errorf("claude response error, error_type: %s, error_message: %s", claudeResponse.Error.Type, claudeResponse.Error.Message)
|
||||
}
|
||||
response := c.responseClaude2OpenAI(ctx, claudeResponse)
|
||||
return types.ActionContinue, replaceJsonResponseBody(response, log)
|
||||
}
|
||||
|
||||
func (c *claudeProvider) OnResponseHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
// use original protocol, skip OnStreamingResponseBody() and OnResponseBody()
|
||||
if c.config.protocol == protocolOriginal {
|
||||
ctx.DontReadResponseBody()
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
_ = proxywasm.RemoveHttpResponseHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (c *claudeProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name ApiName, chunk []byte, isLastChunk bool, log wrapper.Log) ([]byte, error) {
|
||||
if isLastChunk || len(chunk) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
responseBuilder := &strings.Builder{}
|
||||
lines := strings.Split(string(chunk), "\n")
|
||||
for _, data := range lines {
|
||||
// only process the line starting with "data:"
|
||||
if strings.HasPrefix(data, "data:") {
|
||||
// extract json data from the line
|
||||
jsonData := strings.TrimPrefix(data, "data:")
|
||||
var claudeResponse claudeTextGenStreamResponse
|
||||
if err := json.Unmarshal([]byte(jsonData), &claudeResponse); err != nil {
|
||||
log.Errorf("unable to unmarshal claude response: %v", err)
|
||||
continue
|
||||
}
|
||||
response := c.streamResponseClaude2OpenAI(ctx, &claudeResponse, log)
|
||||
if response != nil {
|
||||
responseBody, err := json.Marshal(response)
|
||||
if err != nil {
|
||||
log.Errorf("unable to marshal response: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
c.appendResponse(responseBuilder, string(responseBody))
|
||||
}
|
||||
}
|
||||
}
|
||||
modifiedResponseChunk := responseBuilder.String()
|
||||
log.Debugf("modified response chunk: %s", modifiedResponseChunk)
|
||||
return []byte(modifiedResponseChunk), nil
|
||||
}
|
||||
|
||||
func (c *claudeProvider) buildClaudeTextGenRequest(origRequest *chatCompletionRequest) *claudeTextGenRequest {
|
||||
claudeRequest := claudeTextGenRequest{
|
||||
Model: origRequest.Model,
|
||||
MaxTokens: origRequest.MaxTokens,
|
||||
StopSequences: origRequest.Stop,
|
||||
Stream: origRequest.Stream,
|
||||
Temperature: origRequest.Temperature,
|
||||
TopP: origRequest.TopP,
|
||||
}
|
||||
if claudeRequest.MaxTokens == 0 {
|
||||
claudeRequest.MaxTokens = defaultMaxTokens
|
||||
}
|
||||
|
||||
for _, message := range origRequest.Messages {
|
||||
if message.Role == roleSystem {
|
||||
claudeRequest.System = message.Content
|
||||
continue
|
||||
}
|
||||
claudeMessage := chatMessage{
|
||||
Role: message.Role,
|
||||
Content: message.Content,
|
||||
}
|
||||
claudeRequest.Messages = append(claudeRequest.Messages, claudeMessage)
|
||||
}
|
||||
return &claudeRequest
|
||||
}
|
||||
|
||||
func (c *claudeProvider) responseClaude2OpenAI(ctx wrapper.HttpContext, origResponse *claudeTextGenResponse) *chatCompletionResponse {
|
||||
choice := chatCompletionChoice{
|
||||
Index: 0,
|
||||
Message: &chatMessage{Role: roleAssistant, Content: origResponse.Content[0].Text},
|
||||
FinishReason: stopReasonClaude2OpenAI(origResponse.StopReason),
|
||||
}
|
||||
|
||||
return &chatCompletionResponse{
|
||||
Id: origResponse.Id,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
SystemFingerprint: "",
|
||||
Object: objectChatCompletion,
|
||||
Choices: []chatCompletionChoice{choice},
|
||||
Usage: usage{
|
||||
PromptTokens: origResponse.Usage.InputTokens,
|
||||
CompletionTokens: origResponse.Usage.OutputTokens,
|
||||
TotalTokens: origResponse.Usage.InputTokens + origResponse.Usage.OutputTokens,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func stopReasonClaude2OpenAI(reason *string) string {
|
||||
if reason == nil {
|
||||
return ""
|
||||
}
|
||||
switch *reason {
|
||||
case "end_turn":
|
||||
return finishReasonStop
|
||||
case "stop_sequence":
|
||||
return finishReasonStop
|
||||
case "max_tokens":
|
||||
return finishReasonLength
|
||||
default:
|
||||
return *reason
|
||||
}
|
||||
}
|
||||
|
||||
func (c *claudeProvider) streamResponseClaude2OpenAI(ctx wrapper.HttpContext, origResponse *claudeTextGenStreamResponse, log wrapper.Log) *chatCompletionResponse {
|
||||
switch origResponse.Type {
|
||||
case "message_start":
|
||||
choice := chatCompletionChoice{
|
||||
Index: 0,
|
||||
Delta: &chatMessage{Role: roleAssistant, Content: ""},
|
||||
}
|
||||
return createChatCompletionResponse(ctx, origResponse, choice)
|
||||
|
||||
case "content_block_delta":
|
||||
choice := chatCompletionChoice{
|
||||
Index: 0,
|
||||
Delta: &chatMessage{Content: origResponse.Delta.Text},
|
||||
}
|
||||
return createChatCompletionResponse(ctx, origResponse, choice)
|
||||
|
||||
case "message_delta":
|
||||
choice := chatCompletionChoice{
|
||||
Index: 0,
|
||||
Delta: &chatMessage{},
|
||||
FinishReason: stopReasonClaude2OpenAI(origResponse.Delta.StopReason),
|
||||
}
|
||||
return createChatCompletionResponse(ctx, origResponse, choice)
|
||||
case "content_block_stop", "message_stop":
|
||||
log.Debugf("skip processing response type: %s", origResponse.Type)
|
||||
return nil
|
||||
default:
|
||||
log.Errorf("Unexpected response type: %s", origResponse.Type)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func createChatCompletionResponse(ctx wrapper.HttpContext, response *claudeTextGenStreamResponse, choice chatCompletionChoice) *chatCompletionResponse {
|
||||
return &chatCompletionResponse{
|
||||
Id: response.Message.Id,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
Object: objectChatCompletionChunk,
|
||||
Choices: []chatCompletionChoice{choice},
|
||||
}
|
||||
}
|
||||
|
||||
func (c *claudeProvider) appendResponse(responseBuilder *strings.Builder, responseBody string) {
|
||||
responseBuilder.WriteString(fmt.Sprintf("%s %s\n\n", streamDataItemKey, responseBody))
|
||||
}
|
||||
109
plugins/wasm-go/extensions/ai-proxy/provider/cloudflare.go
Normal file
109
plugins/wasm-go/extensions/ai-proxy/provider/cloudflare.go
Normal file
@@ -0,0 +1,109 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
)
|
||||
|
||||
const (
|
||||
cloudflareDomain = "api.cloudflare.com"
|
||||
// https://developers.cloudflare.com/workers-ai/configuration/open-ai-compatibility/
|
||||
cloudflareChatCompletionPath = "/client/v4/accounts/{account_id}/ai/v1/chat/completions"
|
||||
)
|
||||
|
||||
type cloudflareProviderInitializer struct {
|
||||
}
|
||||
|
||||
func (c *cloudflareProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *cloudflareProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
|
||||
return &cloudflareProvider{
|
||||
config: config,
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
|
||||
type cloudflareProvider struct {
|
||||
config ProviderConfig
|
||||
contextCache *contextCache
|
||||
}
|
||||
|
||||
func (c *cloudflareProvider) GetProviderType() string {
|
||||
return providerTypeCloudflare
|
||||
}
|
||||
|
||||
func (c *cloudflareProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
_ = util.OverwriteRequestPath(strings.Replace(cloudflareChatCompletionPath, "{account_id}", c.config.cloudflareAccountId, 1))
|
||||
_ = util.OverwriteRequestHost(cloudflareDomain)
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + c.config.GetRandomToken())
|
||||
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Accept-Encoding")
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (c *cloudflareProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
model := request.Model
|
||||
if model == "" {
|
||||
return types.ActionContinue, errors.New("missing model in chat completion request")
|
||||
}
|
||||
ctx.SetContext(ctxKeyOriginalRequestModel, model)
|
||||
mappedModel := getMappedModel(model, c.config.modelMapping, log)
|
||||
if mappedModel == "" {
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
request.Model = mappedModel
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, request.Model)
|
||||
|
||||
streaming := request.Stream
|
||||
if streaming {
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Accept", "text/event-stream")
|
||||
}
|
||||
|
||||
if c.contextCache == nil {
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.cloudflare.transform_body_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
err := c.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.cloudflare.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.cloudflare.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
@@ -20,6 +21,9 @@ type deepseekProviderInitializer struct {
|
||||
}
|
||||
|
||||
func (m *deepseekProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -45,14 +49,8 @@ func (m *deepseekProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName Api
|
||||
}
|
||||
_ = util.OverwriteRequestPath(deepseekChatCompletionPath)
|
||||
_ = util.OverwriteRequestHost(deepseekDomain)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Authorization", "Bearer "+m.config.GetRandomToken())
|
||||
|
||||
if m.contextCache == nil {
|
||||
ctx.DontReadRequestBody()
|
||||
} else {
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
}
|
||||
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + m.config.GetRandomToken())
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
@@ -73,11 +71,11 @@ func (m *deepseekProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiNam
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.deepseek.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.deepseek.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
|
||||
606
plugins/wasm-go/extensions/ai-proxy/provider/gemini.go
Normal file
606
plugins/wasm-go/extensions/ai-proxy/provider/gemini.go
Normal file
@@ -0,0 +1,606 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/google/uuid"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// geminiProvider is the provider for google gemini/gemini flash service.
|
||||
|
||||
const (
|
||||
geminiApiKeyHeader = "x-goog-api-key"
|
||||
geminiDomain = "generativelanguage.googleapis.com"
|
||||
)
|
||||
|
||||
type geminiProviderInitializer struct {
|
||||
}
|
||||
|
||||
func (g *geminiProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *geminiProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
|
||||
return &geminiProvider{
|
||||
config: config,
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
|
||||
type geminiProvider struct {
|
||||
config ProviderConfig
|
||||
contextCache *contextCache
|
||||
}
|
||||
|
||||
func (g *geminiProvider) GetProviderType() string {
|
||||
return providerTypeGemini
|
||||
}
|
||||
|
||||
func (g *geminiProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion && apiName != ApiNameEmbeddings {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
_ = proxywasm.ReplaceHttpRequestHeader(geminiApiKeyHeader, g.config.GetRandomToken())
|
||||
_ = util.OverwriteRequestHost(geminiDomain)
|
||||
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Accept-Encoding")
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
|
||||
// Delay the header processing to allow changing streaming mode in OnRequestBody
|
||||
return types.HeaderStopIteration, nil
|
||||
}
|
||||
|
||||
func (g *geminiProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName == ApiNameChatCompletion {
|
||||
return g.onChatCompletionRequestBody(ctx, body, log)
|
||||
} else if apiName == ApiNameEmbeddings {
|
||||
return g.onEmbeddingsRequestBody(ctx, body, log)
|
||||
}
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
func (g *geminiProvider) onChatCompletionRequestBody(ctx wrapper.HttpContext, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
// 使用gemini接口协议
|
||||
if g.config.protocol == protocolOriginal {
|
||||
request := &geminiChatRequest{}
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
if request.Model == "" {
|
||||
return types.ActionContinue, errors.New("request model is empty")
|
||||
}
|
||||
// 根据模型重写requestPath
|
||||
path := g.getRequestPath(ApiNameChatCompletion, request.Model, request.Stream)
|
||||
_ = util.OverwriteRequestPath(path)
|
||||
|
||||
// 移除多余的model和stream字段
|
||||
request = &geminiChatRequest{
|
||||
Contents: request.Contents,
|
||||
SafetySettings: request.SafetySettings,
|
||||
GenerationConfig: request.GenerationConfig,
|
||||
Tools: request.Tools,
|
||||
}
|
||||
if g.config.context == nil {
|
||||
return types.ActionContinue, replaceJsonRequestBody(request, log)
|
||||
}
|
||||
|
||||
err := g.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.gemini.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
g.setSystemContent(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.gemini.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
// 映射模型重写requestPath
|
||||
model := request.Model
|
||||
if model == "" {
|
||||
return types.ActionContinue, errors.New("missing model in chat completion request")
|
||||
}
|
||||
ctx.SetContext(ctxKeyOriginalRequestModel, model)
|
||||
mappedModel := getMappedModel(model, g.config.modelMapping, log)
|
||||
if mappedModel == "" {
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
request.Model = mappedModel
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, request.Model)
|
||||
path := g.getRequestPath(ApiNameChatCompletion, mappedModel, request.Stream)
|
||||
_ = util.OverwriteRequestPath(path)
|
||||
|
||||
if g.config.context == nil {
|
||||
geminiRequest := g.buildGeminiChatRequest(request)
|
||||
return types.ActionContinue, replaceJsonRequestBody(geminiRequest, log)
|
||||
}
|
||||
|
||||
err := g.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.gemini.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
geminiRequest := g.buildGeminiChatRequest(request)
|
||||
if err := replaceJsonRequestBody(geminiRequest, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.gemini.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
func (g *geminiProvider) onEmbeddingsRequestBody(ctx wrapper.HttpContext, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
// 使用gemini接口协议
|
||||
if g.config.protocol == protocolOriginal {
|
||||
request := &geminiBatchEmbeddingRequest{}
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
if request.Model == "" {
|
||||
return types.ActionContinue, errors.New("request model is empty")
|
||||
}
|
||||
// 根据模型重写requestPath
|
||||
path := g.getRequestPath(ApiNameEmbeddings, request.Model, false)
|
||||
_ = util.OverwriteRequestPath(path)
|
||||
|
||||
// 移除多余的model字段
|
||||
request = &geminiBatchEmbeddingRequest{
|
||||
Requests: request.Requests,
|
||||
}
|
||||
return types.ActionContinue, replaceJsonRequestBody(request, log)
|
||||
}
|
||||
request := &embeddingsRequest{}
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
|
||||
// 映射模型重写requestPath
|
||||
model := request.Model
|
||||
if model == "" {
|
||||
return types.ActionContinue, errors.New("missing model in embeddings request")
|
||||
}
|
||||
ctx.SetContext(ctxKeyOriginalRequestModel, model)
|
||||
mappedModel := getMappedModel(model, g.config.modelMapping, log)
|
||||
if mappedModel == "" {
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
request.Model = mappedModel
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, request.Model)
|
||||
path := g.getRequestPath(ApiNameEmbeddings, mappedModel, false)
|
||||
_ = util.OverwriteRequestPath(path)
|
||||
|
||||
geminiRequest := g.buildBatchEmbeddingRequest(request)
|
||||
return types.ActionContinue, replaceJsonRequestBody(geminiRequest, log)
|
||||
}
|
||||
|
||||
func (g *geminiProvider) OnResponseHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if g.config.protocol == protocolOriginal {
|
||||
ctx.DontReadResponseBody()
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
_ = proxywasm.RemoveHttpResponseHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (g *geminiProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name ApiName, chunk []byte, isLastChunk bool, log wrapper.Log) ([]byte, error) {
|
||||
log.Infof("chunk body:%s", string(chunk))
|
||||
if isLastChunk || len(chunk) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
// sample end event response:
|
||||
// data: {"candidates": [{"content": {"parts": [{"text": "我是 Gemini,一个大型多模态模型,由 Google 训练。我的职责是尽我所能帮助您,并尽力提供全面且信息丰富的答复。"}],"role": "model"},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 2,"candidatesTokenCount": 35,"totalTokenCount": 37}}
|
||||
responseBuilder := &strings.Builder{}
|
||||
lines := strings.Split(string(chunk), "\n")
|
||||
for _, data := range lines {
|
||||
if len(data) < 6 {
|
||||
// ignore blank line or wrong format
|
||||
continue
|
||||
}
|
||||
data = data[6:]
|
||||
var geminiResp geminiChatResponse
|
||||
if err := json.Unmarshal([]byte(data), &geminiResp); err != nil {
|
||||
log.Errorf("unable to unmarshal gemini response: %v", err)
|
||||
continue
|
||||
}
|
||||
response := g.buildChatCompletionStreamResponse(ctx, &geminiResp)
|
||||
responseBody, err := json.Marshal(response)
|
||||
if err != nil {
|
||||
log.Errorf("unable to marshal response: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
g.appendResponse(responseBuilder, string(responseBody))
|
||||
}
|
||||
modifiedResponseChunk := responseBuilder.String()
|
||||
log.Debugf("=== modified response chunk: %s", modifiedResponseChunk)
|
||||
return []byte(modifiedResponseChunk), nil
|
||||
}
|
||||
|
||||
func (g *geminiProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName == ApiNameChatCompletion {
|
||||
return g.onChatCompletionResponseBody(ctx, body, log)
|
||||
} else if apiName == ApiNameEmbeddings {
|
||||
return g.onEmbeddingsResponseBody(ctx, body, log)
|
||||
}
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
func (g *geminiProvider) onChatCompletionResponseBody(ctx wrapper.HttpContext, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
geminiResponse := &geminiChatResponse{}
|
||||
if err := json.Unmarshal(body, geminiResponse); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal gemini chat response: %v", err)
|
||||
}
|
||||
if geminiResponse.Error != nil {
|
||||
return types.ActionContinue, fmt.Errorf("gemini chat completion response error, error_code: %d, error_status:%s, error_message: %s",
|
||||
geminiResponse.Error.Code, geminiResponse.Error.Status, geminiResponse.Error.Message)
|
||||
}
|
||||
response := g.buildChatCompletionResponse(ctx, geminiResponse)
|
||||
return types.ActionContinue, replaceJsonResponseBody(response, log)
|
||||
}
|
||||
|
||||
func (g *geminiProvider) onEmbeddingsResponseBody(ctx wrapper.HttpContext, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
geminiResponse := &geminiEmbeddingResponse{}
|
||||
if err := json.Unmarshal(body, geminiResponse); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal gemini embeddings response: %v", err)
|
||||
}
|
||||
if geminiResponse.Error != nil {
|
||||
return types.ActionContinue, fmt.Errorf("gemini embeddings response error, error_code: %d, error_status:%s, error_message: %s",
|
||||
geminiResponse.Error.Code, geminiResponse.Error.Status, geminiResponse.Error.Message)
|
||||
}
|
||||
response := g.buildEmbeddingsResponse(ctx, geminiResponse)
|
||||
return types.ActionContinue, replaceJsonResponseBody(response, log)
|
||||
}
|
||||
|
||||
func (g *geminiProvider) getRequestPath(apiName ApiName, geminiModel string, stream bool) string {
|
||||
action := ""
|
||||
if apiName == ApiNameEmbeddings {
|
||||
action = "batchEmbedContents"
|
||||
} else if stream {
|
||||
action = "streamGenerateContent?alt=sse"
|
||||
} else {
|
||||
action = "generateContent"
|
||||
}
|
||||
return fmt.Sprintf("/v1/models/%s:%s", geminiModel, action)
|
||||
}
|
||||
|
||||
// geminiChatRequest is Gemini's native generateContent request payload.
type geminiChatRequest struct {
	// Model and Stream are only used when using the gemini original protocol
	Model            string                     `json:"model,omitempty"`
	Stream           bool                       `json:"stream,omitempty"`
	Contents         []geminiChatContent        `json:"contents"`
	SafetySettings   []geminiChatSafetySetting  `json:"safety_settings,omitempty"`
	GenerationConfig geminiChatGenerationConfig `json:"generation_config,omitempty"`
	Tools            []geminiChatTools          `json:"tools,omitempty"`
}

// geminiChatContent is one conversation turn ("user" or "model") made of parts.
type geminiChatContent struct {
	Role  string       `json:"role,omitempty"`
	Parts []geminiPart `json:"parts"`
}

// geminiChatSafetySetting maps a harm category to its blocking threshold.
type geminiChatSafetySetting struct {
	Category  string `json:"category"`
	Threshold string `json:"threshold"`
}

// geminiChatGenerationConfig carries sampling and output-limit parameters.
type geminiChatGenerationConfig struct {
	Temperature     float64  `json:"temperature,omitempty"`
	TopP            float64  `json:"topP,omitempty"`
	TopK            float64  `json:"topK,omitempty"`
	MaxOutputTokens int      `json:"maxOutputTokens,omitempty"`
	CandidateCount  int      `json:"candidateCount,omitempty"`
	StopSequences   []string `json:"stopSequences,omitempty"`
}

// geminiChatTools exposes function declarations to the model.
type geminiChatTools struct {
	FunctionDeclarations any `json:"function_declarations,omitempty"`
}

// geminiPart is one piece of content: plain text, inline media data, or a
// function call emitted by the model.
type geminiPart struct {
	Text         string              `json:"text,omitempty"`
	InlineData   *geminiInlineData   `json:"inlineData,omitempty"`
	FunctionCall *geminiFunctionCall `json:"functionCall,omitempty"`
}

// geminiInlineData is inline media data tagged with its MIME type.
type geminiInlineData struct {
	MimeType string `json:"mimeType"`
	Data     string `json:"data"`
}

// geminiFunctionCall is a function invocation requested by the model.
type geminiFunctionCall struct {
	FunctionName string `json:"name"`
	Arguments    any    `json:"args"`
}
|
||||
|
||||
func (g *geminiProvider) buildGeminiChatRequest(request *chatCompletionRequest) *geminiChatRequest {
|
||||
var safetySettings []geminiChatSafetySetting
|
||||
{
|
||||
}
|
||||
for category, threshold := range g.config.geminiSafetySetting {
|
||||
safetySettings = append(safetySettings, geminiChatSafetySetting{
|
||||
Category: category,
|
||||
Threshold: threshold,
|
||||
})
|
||||
}
|
||||
geminiRequest := geminiChatRequest{
|
||||
Contents: make([]geminiChatContent, 0, len(request.Messages)),
|
||||
SafetySettings: safetySettings,
|
||||
GenerationConfig: geminiChatGenerationConfig{
|
||||
Temperature: request.Temperature,
|
||||
TopP: request.TopP,
|
||||
MaxOutputTokens: request.MaxTokens,
|
||||
},
|
||||
}
|
||||
if request.Tools != nil {
|
||||
functions := make([]function, 0, len(request.Tools))
|
||||
for _, tool := range request.Tools {
|
||||
functions = append(functions, tool.Function)
|
||||
}
|
||||
geminiRequest.Tools = []geminiChatTools{
|
||||
{
|
||||
FunctionDeclarations: functions,
|
||||
},
|
||||
}
|
||||
}
|
||||
shouldAddDummyModelMessage := false
|
||||
for _, message := range request.Messages {
|
||||
content := geminiChatContent{
|
||||
Role: message.Role,
|
||||
Parts: []geminiPart{
|
||||
{
|
||||
Text: message.Content,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// there's no assistant role in gemini and API shall vomit if role is not user or model
|
||||
if content.Role == roleAssistant {
|
||||
content.Role = "model"
|
||||
} else if content.Role == roleSystem { // converting system prompt to prompt from user for the same reason
|
||||
content.Role = roleUser
|
||||
shouldAddDummyModelMessage = true
|
||||
}
|
||||
geminiRequest.Contents = append(geminiRequest.Contents, content)
|
||||
|
||||
// if a system message is the last message, we need to add a dummy model message to make gemini happy
|
||||
if shouldAddDummyModelMessage {
|
||||
geminiRequest.Contents = append(geminiRequest.Contents, geminiChatContent{
|
||||
Role: "model",
|
||||
Parts: []geminiPart{
|
||||
{
|
||||
Text: "Okay",
|
||||
},
|
||||
},
|
||||
})
|
||||
shouldAddDummyModelMessage = false
|
||||
}
|
||||
}
|
||||
|
||||
return &geminiRequest
|
||||
}
|
||||
|
||||
func (g *geminiProvider) setSystemContent(request *geminiChatRequest, content string) {
|
||||
systemContents := []geminiChatContent{{
|
||||
Role: roleUser,
|
||||
Parts: []geminiPart{
|
||||
{
|
||||
Text: content,
|
||||
},
|
||||
},
|
||||
}}
|
||||
request.Contents = append(systemContents, request.Contents...)
|
||||
}
|
||||
|
||||
// geminiBatchEmbeddingRequest is Gemini's native batchEmbedContents payload.
type geminiBatchEmbeddingRequest struct {
	// Model are only used when using the gemini original protocol
	Model    string                   `json:"model,omitempty"`
	Requests []geminiEmbeddingRequest `json:"requests"`
}

// geminiEmbeddingRequest embeds a single piece of content.
type geminiEmbeddingRequest struct {
	Model                string            `json:"model"`
	Content              geminiChatContent `json:"content"`
	TaskType             string            `json:"taskType,omitempty"`
	Title                string            `json:"title,omitempty"`
	OutputDimensionality int               `json:"outputDimensionality,omitempty"`
}
|
||||
|
||||
func (g *geminiProvider) buildBatchEmbeddingRequest(request *embeddingsRequest) *geminiBatchEmbeddingRequest {
|
||||
inputs := request.ParseInput()
|
||||
requests := make([]geminiEmbeddingRequest, len(inputs))
|
||||
model := fmt.Sprintf("models/%s", request.Model)
|
||||
|
||||
for i, input := range inputs {
|
||||
requests[i] = geminiEmbeddingRequest{
|
||||
Model: model,
|
||||
Content: geminiChatContent{
|
||||
Parts: []geminiPart{
|
||||
{
|
||||
Text: input,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return &geminiBatchEmbeddingRequest{
|
||||
Requests: requests,
|
||||
}
|
||||
}
|
||||
|
||||
// geminiChatResponse is a Gemini generateContent response (full response or a
// single streamed event); Error is populated instead of Candidates on failure.
type geminiChatResponse struct {
	Candidates     []geminiChatCandidate    `json:"candidates"`
	PromptFeedback geminiChatPromptFeedback `json:"promptFeedback"`
	UsageMetadata  geminiUsageMetadata      `json:"usageMetadata"`
	Error          *geminiResponseError     `json:"error,omitempty"`
}

// geminiChatCandidate is one generated answer with its finish reason and
// per-category safety ratings.
type geminiChatCandidate struct {
	Content       geminiChatContent        `json:"content"`
	FinishReason  string                   `json:"finishReason"`
	Index         int64                    `json:"index"`
	SafetyRatings []geminiChatSafetyRating `json:"safetyRatings"`
}

// geminiChatPromptFeedback carries safety ratings for the prompt itself.
type geminiChatPromptFeedback struct {
	SafetyRatings []geminiChatSafetyRating `json:"safetyRatings"`
}

// geminiUsageMetadata reports token accounting for the request/response pair.
type geminiUsageMetadata struct {
	PromptTokenCount     int `json:"promptTokenCount,omitempty"`
	CandidatesTokenCount int `json:"candidatesTokenCount,omitempty"`
	TotalTokenCount      int `json:"totalTokenCount,omitempty"`
}

// geminiResponseError is the error envelope returned by the Gemini API.
type geminiResponseError struct {
	Code    int    `json:"code,omitempty"`
	Message string `json:"message,omitempty"`
	Status  string `json:"status,omitempty"`
}

// geminiChatSafetyRating grades one harm category for a candidate or prompt.
type geminiChatSafetyRating struct {
	Category    string `json:"category"`
	Probability string `json:"probability"`
}
|
||||
|
||||
// buildChatCompletionResponse converts a full (non-streaming) Gemini chat
// response into an OpenAI-style chat completion response: each candidate
// becomes a choice and the usage metadata is mapped to token counts.
func (g *geminiProvider) buildChatCompletionResponse(ctx wrapper.HttpContext, response *geminiChatResponse) *chatCompletionResponse {
	fullTextResponse := chatCompletionResponse{
		Id:      fmt.Sprintf("chatcmpl-%s", uuid.New().String()),
		Object:  objectChatCompletion,
		Created: time.Now().UnixMilli() / 1000,
		Model:   ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
		Choices: make([]chatCompletionChoice, 0, len(response.Candidates)),
		Usage: usage{
			PromptTokens:     response.UsageMetadata.PromptTokenCount,
			CompletionTokens: response.UsageMetadata.CandidatesTokenCount,
			TotalTokens:      response.UsageMetadata.TotalTokenCount,
		},
	}
	for i, candidate := range response.Candidates {
		choice := chatCompletionChoice{
			Index: i,
			Message: &chatMessage{
				Role: roleAssistant,
			},
			FinishReason: finishReasonStop,
		}
		if len(candidate.Content.Parts) > 0 {
			// A function-call part takes precedence over plain text.
			if candidate.Content.Parts[0].FunctionCall != nil {
				choice.Message.ToolCalls = g.buildToolCalls(&candidate)
			} else {
				choice.Message.Content = candidate.Content.Parts[0].Text
			}
		} else {
			// No parts at all: surface Gemini's own finish reason
			// (e.g. a safety block) instead of the default "stop".
			choice.Message.Content = ""
			choice.FinishReason = candidate.FinishReason
		}
		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
	}
	return &fullTextResponse
}
|
||||
|
||||
func (g *geminiProvider) buildToolCalls(candidate *geminiChatCandidate) []toolCall {
|
||||
var toolCalls []toolCall
|
||||
|
||||
item := candidate.Content.Parts[0]
|
||||
if item.FunctionCall != nil {
|
||||
return toolCalls
|
||||
}
|
||||
argsBytes, err := json.Marshal(item.FunctionCall.Arguments)
|
||||
if err != nil {
|
||||
proxywasm.LogErrorf("get toolCalls from gemini response failed: " + err.Error())
|
||||
return toolCalls
|
||||
}
|
||||
toolCall := toolCall{
|
||||
Id: fmt.Sprintf("call_%s", uuid.New().String()),
|
||||
Type: "function",
|
||||
Function: functionCall{
|
||||
Arguments: string(argsBytes),
|
||||
Name: item.FunctionCall.FunctionName,
|
||||
},
|
||||
}
|
||||
toolCalls = append(toolCalls, toolCall)
|
||||
return toolCalls
|
||||
}
|
||||
|
||||
func (g *geminiProvider) buildChatCompletionStreamResponse(ctx wrapper.HttpContext, geminiResp *geminiChatResponse) *chatCompletionResponse {
|
||||
var choice chatCompletionChoice
|
||||
if len(geminiResp.Candidates) > 0 && len(geminiResp.Candidates[0].Content.Parts) > 0 {
|
||||
choice.Delta = &chatMessage{Content: geminiResp.Candidates[0].Content.Parts[0].Text}
|
||||
}
|
||||
streamResponse := chatCompletionResponse{
|
||||
Id: fmt.Sprintf("chatcmpl-%s", uuid.New().String()),
|
||||
Object: objectChatCompletionChunk,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
Choices: []chatCompletionChoice{choice},
|
||||
Usage: usage{
|
||||
PromptTokens: geminiResp.UsageMetadata.PromptTokenCount,
|
||||
CompletionTokens: geminiResp.UsageMetadata.CandidatesTokenCount,
|
||||
TotalTokens: geminiResp.UsageMetadata.TotalTokenCount,
|
||||
},
|
||||
}
|
||||
return &streamResponse
|
||||
}
|
||||
|
||||
// geminiEmbeddingResponse is a Gemini batchEmbedContents response; Error is
// populated instead of Embeddings on failure.
type geminiEmbeddingResponse struct {
	Embeddings []geminiEmbeddingData `json:"embeddings"`
	Error      *geminiResponseError  `json:"error,omitempty"`
}

// geminiEmbeddingData is a single embedding vector.
type geminiEmbeddingData struct {
	Values []float64 `json:"values"`
}
|
||||
|
||||
func (g *geminiProvider) buildEmbeddingsResponse(ctx wrapper.HttpContext, geminiResp *geminiEmbeddingResponse) *embeddingsResponse {
|
||||
response := embeddingsResponse{
|
||||
Object: "list",
|
||||
Data: make([]embedding, 0, len(geminiResp.Embeddings)),
|
||||
Model: ctx.GetContext(ctxKeyFinalRequestModel).(string),
|
||||
Usage: usage{
|
||||
TotalTokens: 0,
|
||||
},
|
||||
}
|
||||
for _, item := range geminiResp.Embeddings {
|
||||
response.Data = append(response.Data, embedding{
|
||||
Object: `embedding`,
|
||||
Index: 0,
|
||||
Embedding: item.Values,
|
||||
})
|
||||
}
|
||||
return &response
|
||||
}
|
||||
|
||||
func (g *geminiProvider) appendResponse(responseBuilder *strings.Builder, responseBody string) {
|
||||
responseBuilder.WriteString(fmt.Sprintf("%s %s\n\n", streamDataItemKey, responseBody))
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
@@ -18,6 +19,9 @@ const (
|
||||
type groqProviderInitializer struct{}
|
||||
|
||||
func (m *groqProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -43,14 +47,8 @@ func (m *groqProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName
|
||||
}
|
||||
_ = util.OverwriteRequestPath(groqChatCompletionPath)
|
||||
_ = util.OverwriteRequestHost(groqDomain)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Authorization", "Bearer "+m.config.GetRandomToken())
|
||||
|
||||
if m.contextCache == nil {
|
||||
ctx.DontReadRequestBody()
|
||||
} else {
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
}
|
||||
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + m.config.GetRandomToken())
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
@@ -71,11 +69,11 @@ func (m *groqProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, b
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.groq.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.groq.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
|
||||
563
plugins/wasm-go/extensions/ai-proxy/provider/hunyuan.go
Normal file
563
plugins/wasm-go/extensions/ai-proxy/provider/hunyuan.go
Normal file
@@ -0,0 +1,563 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/hmac"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
)
|
||||
|
||||
// hunyuanProvider is the provider for hunyuan AI service.
|
||||
|
||||
const (
	hunyuanDomain                 = "hunyuan.tencentcloudapi.com"
	hunyuanRequestPath            = "/"
	hunyuanChatCompletionTCAction = "ChatCompletions"

	// headers necessary for TC hunyuan api call:
	// ref: https://cloud.tencent.com/document/api/1729/105701, https://cloud.tencent.com/document/api/1729/101842
	actionKey        = "X-TC-Action"
	timestampKey     = "X-TC-Timestamp"
	authorizationKey = "Authorization"
	versionKey       = "X-TC-Version"
	versionValue     = "2023-09-01"
	hostKey          = "Host"

	ssePrefix            = "data: " // start marker of a Server-Sent Events (SSE) streaming response item
	hunyuanStreamEndMark = "stop"   // hunyuan signals the end of a stream with finishReason "stop"

	// expected lengths of the TC3 credential pair, used by config validation
	hunyuanAuthKeyLen = 32
	hunyuanAuthIdLen  = 36
)
|
||||
|
||||
// hunyuanProviderInitializer validates config and creates hunyuanProvider instances.
type hunyuanProviderInitializer struct {
}

// hunyuanTextGenRequest is the native hunyuan ChatCompletions request payload.
// ref: https://console.cloud.tencent.com/api/explorer?Product=hunyuan&Version=2023-09-01&Action=ChatCompletions
type hunyuanTextGenRequest struct {
	Model             string               `json:"Model"`
	Messages          []hunyuanChatMessage `json:"Messages"`
	Stream            bool                 `json:"Stream,omitempty"`
	StreamModeration  bool                 `json:"StreamModeration,omitempty"`
	TopP              float32              `json:"TopP,omitempty"`
	Temperature       float32              `json:"Temperature,omitempty"`
	EnableEnhancement bool                 `json:"EnableEnhancement,omitempty"`
}

// hunyuanTextGenResponseNonStreaming wraps the top-level "Response" envelope
// of a non-streaming hunyuan reply.
type hunyuanTextGenResponseNonStreaming struct {
	Response hunyuanTextGenDetailedResponseNonStreaming `json:"Response"`
}

// hunyuanTextGenDetailedResponseNonStreaming is the body of a hunyuan reply;
// the same shape is also used for a single streaming event.
type hunyuanTextGenDetailedResponseNonStreaming struct {
	RequestId string                 `json:"RequestId,omitempty"`
	Note      string                 `json:"Note"`
	Choices   []hunyuanTextGenChoice `json:"Choices"`
	Created   int64                  `json:"Created"`
	Id        string                 `json:"Id"`
	Usage     hunyuanTextGenUsage    `json:"Usage"`
}

type hunyuanTextGenChoice struct {
	FinishReason string             `json:"FinishReason"`
	Message      hunyuanChatMessage `json:"Message,omitempty"` // holds the generated text in non-streaming responses
	Delta        hunyuanChatMessage `json:"Delta,omitempty"`   // holds the generated text in streaming responses
}

// hunyuanTextGenUsage reports token accounting for a hunyuan response.
type hunyuanTextGenUsage struct {
	PromptTokens     int `json:"PromptTokens"`
	CompletionTokens int `json:"CompletionTokens"`
	TotalTokens      int `json:"TotalTokens"`
}

// hunyuanChatMessage is a single chat turn in hunyuan's message format.
type hunyuanChatMessage struct {
	Role    string `json:"Role,omitempty"`
	Content string `json:"Content,omitempty"`
}
|
||||
|
||||
// ValidateConfig checks the validity of the hunyuan credential pair.
func (m *hunyuanProviderInitializer) ValidateConfig(config ProviderConfig) error {
	// Validate the hunyuan id and key by their expected fixed lengths.
	if len(config.hunyuanAuthId) != hunyuanAuthIdLen || len(config.hunyuanAuthKey) != hunyuanAuthKeyLen {
		return errors.New("hunyuanAuthId / hunyuanAuthKey is illegal in config file")
	}
	return nil
}
|
||||
|
||||
// CreateProvider builds a hunyuanProvider with an HTTP client pinned to the
// hunyuan endpoint and an optional context cache.
func (m *hunyuanProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
	return &hunyuanProvider{
		config: config,
		client: wrapper.NewClusterClient(wrapper.RouteCluster{
			Host: hunyuanDomain,
		}),
		contextCache: createContextCache(&config),
	}, nil
}
|
||||
|
||||
// hunyuanProvider is the provider implementation for Tencent's hunyuan AI service.
type hunyuanProvider struct {
	config ProviderConfig

	// client targets the hunyuan endpoint; contextCache optionally supplies
	// context-file content to be injected into requests.
	client       wrapper.HttpClient
	contextCache *contextCache
}
|
||||
|
||||
// GetProviderType returns the canonical name of this provider.
func (m *hunyuanProvider) GetProviderType() string {
	return providerTypeHunyuan
}
|
||||
|
||||
// OnRequestHeaders rewrites the request headers for the hunyuan endpoint:
// host, path, and the Tencent-Cloud action/version headers. It returns
// HeaderStopIteration because OnRequestBody may still switch the request
// between streaming and non-streaming mode.
func (m *hunyuanProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
	if apiName != ApiNameChatCompletion {
		return types.ActionContinue, errUnsupportedApiName
	}

	_ = util.OverwriteRequestHost(hunyuanDomain)
	_ = util.OverwriteRequestPath(hunyuanRequestPath)

	// Add the custom headers hunyuan requires.
	_ = proxywasm.ReplaceHttpRequestHeader(actionKey, hunyuanChatCompletionTCAction)
	_ = proxywasm.ReplaceHttpRequestHeader(versionKey, versionValue)

	// Drop headers that would conflict with the rewritten body.
	_ = proxywasm.RemoveHttpRequestHeader("Accept-Encoding")
	_ = proxywasm.RemoveHttpRequestHeader("Content-Length")

	// Delay the header processing to allow changing streaming mode in OnRequestBody
	return types.HeaderStopIteration, nil
}
|
||||
|
||||
func (m *hunyuanProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
// 为header添加时间戳字段 (因为需要根据body进行签名时依赖时间戳,故于body处理部分创建时间戳)
|
||||
var timestamp int64 = time.Now().Unix()
|
||||
_ = proxywasm.ReplaceHttpRequestHeader(timestampKey, fmt.Sprintf("%d", timestamp))
|
||||
// log.Debugf("#debug nash5# OnRequestBody set timestamp header: ", timestamp)
|
||||
|
||||
// 使用混元本身接口的协议
|
||||
if m.config.protocol == protocolOriginal {
|
||||
request := &hunyuanTextGenRequest{}
|
||||
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
|
||||
// 根据确定好的payload进行签名
|
||||
hunyuanBody, _ := json.Marshal(request)
|
||||
authorizedValueNew := GetTC3Authorizationcode(m.config.hunyuanAuthId, m.config.hunyuanAuthKey, timestamp, hunyuanDomain, hunyuanChatCompletionTCAction, string(hunyuanBody))
|
||||
_ = util.OverwriteRequestAuthorization(authorizedValueNew)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Accept", "*/*")
|
||||
// log.Debugf("#debug nash5# OnRequestBody call hunyuan api using original api! signature computation done!")
|
||||
|
||||
// 若无配置文件,直接返回
|
||||
if m.config.context == nil {
|
||||
return types.ActionContinue, replaceJsonRequestBody(request, log)
|
||||
}
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
log.Debugf("#debug nash5# ctx file loaded! callback start, content is: %s", content)
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.hunyuan.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
m.insertContextMessageIntoHunyuanRequest(request, content)
|
||||
|
||||
// 因为手动插入了context内容,这里需要重新计算签名
|
||||
hunyuanBody, _ := json.Marshal(request)
|
||||
authorizedValueNew := GetTC3Authorizationcode(m.config.hunyuanAuthId, m.config.hunyuanAuthKey, timestamp, hunyuanDomain, hunyuanChatCompletionTCAction, string(hunyuanBody))
|
||||
_ = util.OverwriteRequestAuthorization(authorizedValueNew)
|
||||
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.hunyuan.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
log.Debugf("#debug nash5# ctx file load success!")
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
|
||||
log.Debugf("#debug nash5# ctx file load failed!")
|
||||
return types.ActionContinue, replaceJsonRequestBody(request, log)
|
||||
}
|
||||
|
||||
// 使用open ai接口协议
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
// log.Debugf("#debug nash5# OnRequestBody call hunyuan api using openai's api!")
|
||||
|
||||
model := request.Model
|
||||
if model == "" {
|
||||
return types.ActionContinue, errors.New("missing model in chat completion request")
|
||||
}
|
||||
ctx.SetContext(ctxKeyOriginalRequestModel, model) // 设置原始请求的model,以便返回值使用
|
||||
mappedModel := getMappedModel(model, m.config.modelMapping, log)
|
||||
if mappedModel == "" {
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
request.Model = mappedModel
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, request.Model) // 设置真实请求的模型,以便返回值使用
|
||||
|
||||
// 看请求中的stream的设置,相应的我们更该http头
|
||||
streaming := request.Stream
|
||||
if streaming {
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Accept", "text/event-stream")
|
||||
} else {
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Accept", "*/*")
|
||||
}
|
||||
|
||||
// 若没有配置上下文,直接开始请求
|
||||
if m.config.context == nil {
|
||||
hunyuanRequest := m.buildHunyuanTextGenerationRequest(request)
|
||||
|
||||
// 根据确定好的payload进行签名:
|
||||
body, _ := json.Marshal(hunyuanRequest)
|
||||
authorizedValueNew := GetTC3Authorizationcode(
|
||||
m.config.hunyuanAuthId,
|
||||
m.config.hunyuanAuthKey,
|
||||
timestamp,
|
||||
hunyuanDomain,
|
||||
hunyuanChatCompletionTCAction,
|
||||
string(body),
|
||||
)
|
||||
_ = util.OverwriteRequestAuthorization(authorizedValueNew)
|
||||
// log.Debugf("#debug nash5# OnRequestBody done, body is: ", string(body))
|
||||
|
||||
// // 打印所有的headers
|
||||
// headers, err2 := proxywasm.GetHttpRequestHeaders()
|
||||
// if err2 != nil {
|
||||
// log.Errorf("failed to get request headers: %v", err2)
|
||||
// } else {
|
||||
// // 迭代并打印所有请求头
|
||||
// for _, header := range headers {
|
||||
// log.Infof("#debug nash5# inB Request header - %s: %s", header[0], header[1])
|
||||
// }
|
||||
// }
|
||||
return types.ActionContinue, replaceJsonRequestBody(hunyuanRequest, log)
|
||||
}
|
||||
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.hunyuan.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
return
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
hunyuanRequest := m.buildHunyuanTextGenerationRequest(request)
|
||||
|
||||
// 因为手动插入了context内容,这里需要重新计算签名
|
||||
hunyuanBody, _ := json.Marshal(hunyuanRequest)
|
||||
authorizedValueNew := GetTC3Authorizationcode(m.config.hunyuanAuthId, m.config.hunyuanAuthKey, timestamp, hunyuanDomain, hunyuanChatCompletionTCAction, string(hunyuanBody))
|
||||
_ = util.OverwriteRequestAuthorization(authorizedValueNew)
|
||||
|
||||
if err := replaceJsonRequestBody(hunyuanRequest, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.hunyuan.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
// OnResponseHeaders drops Content-Length because the response body may be
// rewritten (hunyuan format -> OpenAI format) after headers are processed.
func (m *hunyuanProvider) OnResponseHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
	_ = proxywasm.RemoveHttpResponseHeader("Content-Length")
	return types.ActionContinue, nil
}
|
||||
|
||||
func (m *hunyuanProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name ApiName, chunk []byte, isLastChunk bool, log wrapper.Log) ([]byte, error) {
|
||||
if m.config.protocol == protocolOriginal {
|
||||
return chunk, nil
|
||||
}
|
||||
|
||||
// hunyuan的流式返回:
|
||||
//data: {"Note":"以上内容为AI生成,不代表开发者立场,请勿删除或修改本标记","Choices":[{"Delta":{"Role":"assistant","Content":"有助于"},"FinishReason":""}],"Created":1716359713,"Id":"086b6b19-8b2c-4def-a65c-db6a7bc86acd","Usage":{"PromptTokens":7,"CompletionTokens":145,"TotalTokens":152}}
|
||||
|
||||
// openai的流式返回
|
||||
// data: {"id": "chatcmpl-7QyqpwdfhqwajicIEznoc6Q47XAyW", "object": "chat.completion.chunk", "created": 1677664795, "model": "gpt-3.5-turbo-0613", "choices": [{"delta": {"content": "The "}, "index": 0, "finish_reason": null}]}
|
||||
|
||||
// log.Debugf("#debug nash5# [OnStreamingResponseBody] chunk is: %s", string(chunk))
|
||||
|
||||
// 从上下文获取现有缓冲区数据
|
||||
newBufferedBody := chunk
|
||||
if bufferedBody, has := ctx.GetContext(ctxKeyStreamingBody).([]byte); has {
|
||||
newBufferedBody = append(bufferedBody, chunk...)
|
||||
}
|
||||
|
||||
// 初始化处理下标,以及将要返回的处理过的chunks
|
||||
var newEventPivot = -1
|
||||
var outputBuffer []byte
|
||||
|
||||
// 从buffer区取出若干完整的chunk,将其转为openAI格式后返回
|
||||
// 处理可能包含多个事件的缓冲区
|
||||
for {
|
||||
eventStartIndex := bytes.Index(newBufferedBody, []byte(ssePrefix))
|
||||
if eventStartIndex == -1 {
|
||||
break // 没有找到新事件,跳出循环
|
||||
}
|
||||
|
||||
// 移除缓冲区前面非事件部分
|
||||
newBufferedBody = newBufferedBody[eventStartIndex+len(ssePrefix):]
|
||||
|
||||
// 查找事件结束的位置(即下一个事件的开始)
|
||||
newEventPivot = bytes.Index(newBufferedBody, []byte("\n\n"))
|
||||
if newEventPivot == -1 && !isLastChunk {
|
||||
// 未找到事件结束标识,跳出循环等待更多数据,若是最后一个chunk,不一定有2个换行符
|
||||
break
|
||||
}
|
||||
|
||||
// 提取并处理一个完整的事件
|
||||
eventData := newBufferedBody[:newEventPivot]
|
||||
// log.Debugf("@@@ <<< ori chun is: %s", string(newBufferedBody[:newEventPivot]))
|
||||
newBufferedBody = newBufferedBody[newEventPivot+2:] // 跳过结束标识
|
||||
|
||||
// 转换并追加到输出缓冲区
|
||||
convertedData, _ := m.convertChunkFromHunyuanToOpenAI(ctx, eventData, log)
|
||||
// log.Debugf("@@@ >>> converted one chunk: %s", string(convertedData))
|
||||
outputBuffer = append(outputBuffer, convertedData...)
|
||||
}
|
||||
|
||||
// 刷新剩余的不完整事件回到上下文缓冲区以便下次继续处理
|
||||
ctx.SetContext(ctxKeyStreamingBody, newBufferedBody)
|
||||
|
||||
log.Debugf("=== modified response chunk: %s", string(outputBuffer))
|
||||
return outputBuffer, nil
|
||||
}
|
||||
|
||||
func (m *hunyuanProvider) convertChunkFromHunyuanToOpenAI(ctx wrapper.HttpContext, hunyuanChunk []byte, log wrapper.Log) ([]byte, error) {
|
||||
// 将hunyuan的chunk转为openai的chunk
|
||||
hunyuanFormattedChunk := &hunyuanTextGenDetailedResponseNonStreaming{}
|
||||
if err := json.Unmarshal(hunyuanChunk, hunyuanFormattedChunk); err != nil {
|
||||
return []byte(""), nil
|
||||
}
|
||||
|
||||
openAIFormattedChunk := &chatCompletionResponse{
|
||||
Id: hunyuanFormattedChunk.Id,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
SystemFingerprint: "",
|
||||
Object: objectChatCompletionChunk,
|
||||
Usage: usage{
|
||||
PromptTokens: hunyuanFormattedChunk.Usage.PromptTokens,
|
||||
CompletionTokens: hunyuanFormattedChunk.Usage.CompletionTokens,
|
||||
TotalTokens: hunyuanFormattedChunk.Usage.TotalTokens,
|
||||
},
|
||||
}
|
||||
// tmpStr3, _ := json.Marshal(hunyuanFormattedChunk)
|
||||
// log.Debugf("@@@ --- 源数据是:: %s", tmpStr3)
|
||||
|
||||
// 是否为最后一个chunk?
|
||||
if hunyuanFormattedChunk.Choices[0].FinishReason == hunyuanStreamEndMark {
|
||||
// log.Debugf("@@@ --- 最后chunk: ")
|
||||
openAIFormattedChunk.Choices = append(openAIFormattedChunk.Choices, chatCompletionChoice{
|
||||
FinishReason: hunyuanFormattedChunk.Choices[0].FinishReason,
|
||||
})
|
||||
} else {
|
||||
deltaMsg := chatMessage{
|
||||
Name: "",
|
||||
Role: hunyuanFormattedChunk.Choices[0].Delta.Role,
|
||||
Content: hunyuanFormattedChunk.Choices[0].Delta.Content,
|
||||
ToolCalls: []toolCall{},
|
||||
}
|
||||
|
||||
// tmpStr2, _ := json.Marshal(deltaMsg)
|
||||
// log.Debugf("@@@ --- 中间chunk: choices.chatMsg 是: %s", tmpStr2)
|
||||
|
||||
openAIFormattedChunk.Choices = append(
|
||||
openAIFormattedChunk.Choices,
|
||||
chatCompletionChoice{Delta: &deltaMsg},
|
||||
)
|
||||
// tmpStr, _ := json.Marshal(openAIFormattedChunk.Choices)
|
||||
// log.Debugf("@@@ --- 中间chunk: choices 是: %s", tmpStr)
|
||||
}
|
||||
|
||||
// 返回的格式
|
||||
openAIFormattedChunkBytes, _ := json.Marshal(openAIFormattedChunk)
|
||||
var openAIChunk strings.Builder
|
||||
openAIChunk.WriteString(ssePrefix)
|
||||
openAIChunk.WriteString(string(openAIFormattedChunkBytes))
|
||||
openAIChunk.WriteString("\n\n")
|
||||
|
||||
return []byte(openAIChunk.String()), nil
|
||||
}
|
||||
|
||||
// OnResponseBody converts a non-streaming Hunyuan response into the OpenAI
// chat-completion format. When the original (Hunyuan) protocol is configured,
// the parsed response is re-serialized unchanged instead.
func (m *hunyuanProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
	// NOTE(review): leftover developer debug logging ("#debug nash5#");
	// consider removing or downgrading before release.
	log.Debugf("#debug nash5# onRespBody's resp is: %s", string(body))
	hunyuanResponse := &hunyuanTextGenResponseNonStreaming{}
	if err := json.Unmarshal(body, hunyuanResponse); err != nil {
		return types.ActionContinue, fmt.Errorf("unable to unmarshal hunyuan response: %v", err)
	}

	// Pass-through mode: the client already speaks the Hunyuan protocol.
	if m.config.protocol == protocolOriginal {
		return types.ActionContinue, replaceJsonResponseBody(hunyuanResponse, log)
	}

	response := m.buildChatCompletionResponse(ctx, hunyuanResponse)

	return types.ActionContinue, replaceJsonResponseBody(response, log)
}
|
||||
|
||||
func (m *hunyuanProvider) insertContextMessageIntoHunyuanRequest(request *hunyuanTextGenRequest, content string) {
|
||||
|
||||
fileMessage := hunyuanChatMessage{
|
||||
Role: roleSystem,
|
||||
Content: content,
|
||||
}
|
||||
messages := request.Messages
|
||||
request.Messages = append([]hunyuanChatMessage{},
|
||||
append([]hunyuanChatMessage{fileMessage}, messages...)...,
|
||||
)
|
||||
}
|
||||
|
||||
func (m *hunyuanProvider) buildHunyuanTextGenerationRequest(request *chatCompletionRequest) *hunyuanTextGenRequest {
|
||||
hunyuanRequest := &hunyuanTextGenRequest{
|
||||
Model: request.Model,
|
||||
Messages: convertMessagesFromOpenAIToHunyuan(request.Messages),
|
||||
Stream: request.Stream,
|
||||
StreamModeration: false,
|
||||
TopP: float32(request.TopP),
|
||||
Temperature: float32(request.Temperature),
|
||||
EnableEnhancement: false,
|
||||
}
|
||||
|
||||
return hunyuanRequest
|
||||
}
|
||||
|
||||
func convertMessagesFromOpenAIToHunyuan(openAIMessages []chatMessage) []hunyuanChatMessage {
|
||||
// 将chatgpt的messages转换为hunyuan的messages
|
||||
hunyuanChatMessages := make([]hunyuanChatMessage, 0, len(openAIMessages))
|
||||
for _, msg := range openAIMessages {
|
||||
hunyuanChatMessages = append(hunyuanChatMessages, hunyuanChatMessage{
|
||||
Role: msg.Role,
|
||||
Content: msg.Content,
|
||||
})
|
||||
}
|
||||
|
||||
return hunyuanChatMessages
|
||||
}
|
||||
|
||||
func (m *hunyuanProvider) buildChatCompletionResponse(ctx wrapper.HttpContext, hunyuanResponse *hunyuanTextGenResponseNonStreaming) *chatCompletionResponse {
|
||||
choices := make([]chatCompletionChoice, 0, len(hunyuanResponse.Response.Choices))
|
||||
for _, choice := range hunyuanResponse.Response.Choices {
|
||||
choices = append(choices, chatCompletionChoice{
|
||||
Message: &chatMessage{
|
||||
Name: "",
|
||||
Role: choice.Message.Role,
|
||||
Content: choice.Message.Content,
|
||||
ToolCalls: nil,
|
||||
},
|
||||
FinishReason: choice.FinishReason,
|
||||
})
|
||||
}
|
||||
return &chatCompletionResponse{
|
||||
Id: hunyuanResponse.Response.Id,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
SystemFingerprint: "",
|
||||
Object: objectChatCompletion,
|
||||
Choices: choices,
|
||||
Usage: usage{
|
||||
PromptTokens: hunyuanResponse.Response.Usage.PromptTokens,
|
||||
CompletionTokens: hunyuanResponse.Response.Usage.CompletionTokens,
|
||||
TotalTokens: hunyuanResponse.Response.Usage.TotalTokens,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Sha256hex returns the lowercase hex-encoded SHA-256 digest of s.
func Sha256hex(s string) string {
	digest := sha256.Sum256([]byte(s))
	return hex.EncodeToString(digest[:])
}
|
||||
|
||||
// Hmacsha256 returns the HMAC-SHA256 of s keyed with key. Note that the
// result is the raw digest bytes packed into a string, not hex-encoded;
// callers hex-encode it themselves where needed.
func Hmacsha256(s, key string) string {
	mac := hmac.New(sha256.New, []byte(key))
	mac.Write([]byte(s))
	return string(mac.Sum(nil))
}
|
||||
|
||||
/**
 * GetTC3Authorizationcode builds a Tencent Cloud TC3-HMAC-SHA256
 * Authorization header value for a Hunyuan API call.
 *
 * @param secretId  access key id
 * @param secretKey access key secret
 * @param timestamp request unix timestamp in seconds (must match the
 *                  X-TC-Timestamp header sent with the request)
 * @param host      target API host
 * @param action    API action name (sent as X-TC-Action)
 * @param payload   raw JSON request body
 * @return the Authorization header value
 */
func GetTC3Authorizationcode(secretId string, secretKey string, timestamp int64, host string, action string, payload string) string {
	algorithm := "TC3-HMAC-SHA256"
	service := "hunyuan" // note: must match the product name embedded in the domain

	// step 1: build canonical request string
	httpRequestMethod := "POST"
	canonicalURI := "/"
	canonicalQueryString := ""
	canonicalHeaders := fmt.Sprintf("content-type:%s\nhost:%s\nx-tc-action:%s\n",
		"application/json", host, strings.ToLower(action))
	signedHeaders := "content-type;host;x-tc-action"

	// fmt.Println("payload is: %s", payload)
	hashedRequestPayload := Sha256hex(payload)
	canonicalRequest := fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s",
		httpRequestMethod,
		canonicalURI,
		canonicalQueryString,
		canonicalHeaders,
		signedHeaders,
		hashedRequestPayload)
	// fmt.Println(canonicalRequest)

	// step 2: build string to sign
	// The credential scope date must be derived from the same timestamp as
	// the X-TC-Timestamp header, in UTC.
	date := time.Unix(timestamp, 0).UTC().Format("2006-01-02")
	credentialScope := fmt.Sprintf("%s/%s/tc3_request", date, service)
	hashedCanonicalRequest := Sha256hex(canonicalRequest)
	string2sign := fmt.Sprintf("%s\n%d\n%s\n%s",
		algorithm,
		timestamp,
		credentialScope,
		hashedCanonicalRequest)
	// fmt.Println(string2sign)

	// step 3: sign string
	// Derivation chain per the TC3 spec: date -> service -> "tc3_request",
	// each step keyed by the previous raw HMAC output.
	secretDate := Hmacsha256(date, "TC3"+secretKey)
	secretService := Hmacsha256(service, secretDate)
	secretSigning := Hmacsha256("tc3_request", secretService)
	signature := hex.EncodeToString([]byte(Hmacsha256(string2sign, secretSigning)))
	// fmt.Println(signature)

	// step 4: build authorization
	authorization := fmt.Sprintf("%s Credential=%s/%s, SignedHeaders=%s, Signature=%s",
		algorithm,
		secretId,
		credentialScope,
		signedHeaders,
		signature)

	// curl := fmt.Sprintf(`curl -X POST https://%s \
	// -H "Authorization: %s" \
	// -H "Content-Type: application/json" \
	// -H "Host: %s" -H "X-TC-Action: %s" \
	// -H "X-TC-Timestamp: %d" \
	// -H "X-TC-Version: 2023-09-01" \
	// -d '%s'`, host, authorization, host, action, timestamp, payload)
	// fmt.Println(curl)
	return authorization
}
|
||||
476
plugins/wasm-go/extensions/ai-proxy/provider/minimax.go
Normal file
476
plugins/wasm-go/extensions/ai-proxy/provider/minimax.go
Normal file
@@ -0,0 +1,476 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
)
|
||||
|
||||
// minimaxProvider is the provider for minimax service.
|
||||
|
||||
const (
	minimaxDomain = "api.minimax.chat"
	// minimaxChatCompletionV2Path: request/response format matches OpenAI.
	// API doc: https://platform.minimaxi.com/document/guides/chat-model/V2?id=65e0736ab2845de20908e2dd
	minimaxChatCompletionV2Path = "/v1/text/chatcompletion_v2"
	// minimaxChatCompletionProPath: request/response format differs from OpenAI.
	// API doc: https://platform.minimaxi.com/document/guides/chat-model/pro/api?id=6569c85948bc7b684b30377e
	minimaxChatCompletionProPath = "/v1/text/chatcompletion_pro"

	senderTypeUser string = "USER" // content sent by the user
	senderTypeBot  string = "BOT"  // content generated by the model

	// Default bot settings, used when the request does not carry its own.
	defaultBotName           string = "MM智能助理"
	defaultBotSettingContent string = "MM智能助理是一款由MiniMax自研的,没有调用其他产品的接口的大型语言模型。MiniMax是一家中国科技公司,一直致力于进行大模型相关的研究。"
	defaultSenderName        string = "小明"
)
|
||||
|
||||
// chatCompletionProModels lists the models that must be served through the
// ChatCompletion Pro endpoint; the empty-struct map is used purely for O(1)
// membership checks.
var chatCompletionProModels = map[string]struct{}{
	"abab6.5-chat":  {},
	"abab6.5s-chat": {},
	"abab5.5s-chat": {},
	"abab5.5-chat":  {},
}
|
||||
|
||||
// minimaxProviderInitializer validates provider configuration and creates
// minimaxProvider instances.
type minimaxProviderInitializer struct {
}
|
||||
|
||||
func (m *minimaxProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
// 如果存在模型对应接口为ChatCompletion Pro必须配置minimaxGroupId
|
||||
if len(config.modelMapping) > 0 && config.minimaxGroupId == "" {
|
||||
for _, minimaxModel := range config.modelMapping {
|
||||
if _, exists := chatCompletionProModels[minimaxModel]; exists {
|
||||
return errors.New(fmt.Sprintf("missing minimaxGroupId in provider config when %s model is provided", minimaxModel))
|
||||
}
|
||||
}
|
||||
}
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *minimaxProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
|
||||
return &minimaxProvider{
|
||||
config: config,
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// minimaxProvider implements the Provider interface for the minimax service.
type minimaxProvider struct {
	config       ProviderConfig
	contextCache *contextCache
}
|
||||
|
||||
// GetProviderType returns the provider type identifier for minimax.
func (m *minimaxProvider) GetProviderType() string {
	return providerTypeMinimax
}
|
||||
|
||||
// OnRequestHeaders rewrites outbound request headers for minimax: the Host
// header, the Authorization header (a random token from the configured pool),
// and removes Content-Length because the body may be rewritten later.
func (m *minimaxProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
	if apiName != ApiNameChatCompletion {
		return types.ActionContinue, errUnsupportedApiName
	}
	_ = util.OverwriteRequestHost(minimaxDomain)
	_ = util.OverwriteRequestAuthorization("Bearer " + m.config.GetRandomToken())
	_ = proxywasm.RemoveHttpRequestHeader("Content-Length")

	// Delay the header processing to allow changing streaming mode in OnRequestBody
	return types.HeaderStopIteration, nil
}
|
||||
|
||||
// OnRequestBody parses the model from the request body, applies the configured
// model mapping, records both the original and mapped names in the HTTP
// context, and dispatches to the Pro or V2 handler depending on which
// endpoint serves the mapped model.
func (m *minimaxProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
	if apiName != ApiNameChatCompletion {
		return types.ActionContinue, errUnsupportedApiName
	}
	// Parse and map the model, then stash both names for later phases.
	model, err := m.parseModel(body)
	if err != nil {
		return types.ActionContinue, err
	}
	ctx.SetContext(ctxKeyOriginalRequestModel, model)
	mappedModel := getMappedModel(model, m.config.modelMapping, log)
	if mappedModel == "" {
		return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
	}
	ctx.SetContext(ctxKeyFinalRequestModel, mappedModel)
	_, ok := chatCompletionProModels[mappedModel]
	if ok {
		// Model is served by the ChatCompletion Pro endpoint.
		return m.handleRequestBodyByChatCompletionPro(body, log)
	} else {
		// Model is served by the ChatCompletion v2 endpoint.
		return m.handleRequestBodyByChatCompletionV2(body, log)
	}
}
|
||||
|
||||
// handleRequestBodyByChatCompletionPro 使用ChatCompletion Pro接口处理请求体
|
||||
func (m *minimaxProvider) handleRequestBodyByChatCompletionPro(body []byte, log wrapper.Log) (types.Action, error) {
|
||||
// 使用minimax接口协议
|
||||
if m.config.protocol == protocolOriginal {
|
||||
request := &minimaxChatCompletionV2Request{}
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
if request.Model == "" {
|
||||
return types.ActionContinue, errors.New("request model is empty")
|
||||
}
|
||||
// 根据模型重写requestPath
|
||||
if m.config.minimaxGroupId == "" {
|
||||
return types.ActionContinue, errors.New(fmt.Sprintf("missing minimaxGroupId in provider config when use %s model ", request.Model))
|
||||
}
|
||||
_ = util.OverwriteRequestPath(fmt.Sprintf("%s?GroupId=%s", minimaxChatCompletionProPath, m.config.minimaxGroupId))
|
||||
|
||||
if m.config.context == nil {
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.minimax.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
m.setBotSettings(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.minimax.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
// 映射模型重写requestPath
|
||||
request.Model = getMappedModel(request.Model, m.config.modelMapping, log)
|
||||
_ = util.OverwriteRequestPath(fmt.Sprintf("%s?GroupId=%s", minimaxChatCompletionProPath, m.config.minimaxGroupId))
|
||||
|
||||
if m.config.context == nil {
|
||||
minimaxRequest := m.buildMinimaxChatCompletionV2Request(request, "")
|
||||
return types.ActionContinue, replaceJsonRequestBody(minimaxRequest, log)
|
||||
}
|
||||
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.minimax.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
minimaxRequest := m.buildMinimaxChatCompletionV2Request(request, content)
|
||||
if err := replaceJsonRequestBody(minimaxRequest, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.minimax.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace Request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
// handleRequestBodyByChatCompletionV2 使用ChatCompletion v2接口处理请求体
|
||||
func (m *minimaxProvider) handleRequestBodyByChatCompletionV2(body []byte, log wrapper.Log) (types.Action, error) {
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
// 映射模型重写requestPath
|
||||
request.Model = getMappedModel(request.Model, m.config.modelMapping, log)
|
||||
_ = util.OverwriteRequestPath(minimaxChatCompletionV2Path)
|
||||
|
||||
if m.contextCache == nil {
|
||||
return types.ActionContinue, replaceJsonRequestBody(request, log)
|
||||
}
|
||||
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.minimax.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.minimax.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
// OnResponseHeaders decides whether the response body phases need to run.
// Only OpenAI-protocol requests whose model went through the ChatCompletion
// Pro endpoint require body rewriting; all other responses are passed through
// untouched.
func (m *minimaxProvider) OnResponseHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
	// Original minimax protocol: skip OnStreamingResponseBody() and OnResponseBody().
	if m.config.protocol == protocolOriginal {
		ctx.DontReadResponseBody()
		return types.ActionContinue, nil
	}
	// Model served by ChatCompletion v2 (already OpenAI-compatible): skip
	// OnStreamingResponseBody() and OnResponseBody().
	model := ctx.GetStringContext(ctxKeyFinalRequestModel, "")
	if model != "" {
		_, ok := chatCompletionProModels[model]
		if !ok {
			ctx.DontReadResponseBody()
			return types.ActionContinue, nil
		}
	}
	// The body will be rewritten, so the original Content-Length is invalid.
	_ = proxywasm.RemoveHttpResponseHeader("Content-Length")
	return types.ActionContinue, nil
}
|
||||
|
||||
// OnStreamingResponseBody 只处理使用OpenAI协议 且 模型对应接口为ChatCompletion Pro的流式响应
|
||||
func (m *minimaxProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name ApiName, chunk []byte, isLastChunk bool, log wrapper.Log) ([]byte, error) {
|
||||
if isLastChunk || len(chunk) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
// sample event response:
|
||||
// data: {"created":1689747645,"model":"abab6.5s-chat","reply":"","choices":[{"messages":[{"sender_type":"BOT","sender_name":"MM智能助理","text":"am from China."}]}],"output_sensitive":false}
|
||||
|
||||
// sample end event response:
|
||||
// data: {"created":1689747645,"model":"abab6.5s-chat","reply":"I am from China.","choices":[{"finish_reason":"stop","messages":[{"sender_type":"BOT","sender_name":"MM智能助理","text":"I am from China."}]}],"usage":{"total_tokens":187},"input_sensitive":false,"output_sensitive":false,"id":"0106b3bc9fd844a9f3de1aa06004e2ab","base_resp":{"status_code":0,"status_msg":""}}
|
||||
responseBuilder := &strings.Builder{}
|
||||
lines := strings.Split(string(chunk), "\n")
|
||||
for _, data := range lines {
|
||||
if len(data) < 6 {
|
||||
// ignore blank line or wrong format
|
||||
continue
|
||||
}
|
||||
data = data[6:]
|
||||
var minimaxResp minimaxChatCompletionV2Resp
|
||||
if err := json.Unmarshal([]byte(data), &minimaxResp); err != nil {
|
||||
log.Errorf("unable to unmarshal minimax response: %v", err)
|
||||
continue
|
||||
}
|
||||
response := m.responseV2ToOpenAI(&minimaxResp)
|
||||
responseBody, err := json.Marshal(response)
|
||||
if err != nil {
|
||||
log.Errorf("unable to marshal response: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
m.appendResponse(responseBuilder, string(responseBody))
|
||||
}
|
||||
modifiedResponseChunk := responseBuilder.String()
|
||||
log.Debugf("=== modified response chunk: %s", modifiedResponseChunk)
|
||||
return []byte(modifiedResponseChunk), nil
|
||||
}
|
||||
|
||||
// OnResponseBody handles non-streaming responses, but only for OpenAI-protocol
// requests whose model is served by the ChatCompletion Pro endpoint (other
// responses skip this phase via OnResponseHeaders). It converts the minimax
// response into the OpenAI chat-completion format.
func (m *minimaxProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
	minimaxResp := &minimaxChatCompletionV2Resp{}
	if err := json.Unmarshal(body, minimaxResp); err != nil {
		return types.ActionContinue, fmt.Errorf("unable to unmarshal minimax response: %v", err)
	}
	// A non-zero status code signals an upstream error; surface it instead of converting.
	if minimaxResp.BaseResp.StatusCode != 0 {
		return types.ActionContinue, fmt.Errorf("minimax response error, error_code: %d, error_message: %s", minimaxResp.BaseResp.StatusCode, minimaxResp.BaseResp.StatusMsg)
	}
	response := m.responseV2ToOpenAI(minimaxResp)
	return types.ActionContinue, replaceJsonResponseBody(response, log)
}
|
||||
|
||||
// minimaxChatCompletionV2Request is the minimax chat completion request schema.
// NOTE(review): despite the "V2" name, this struct (with bot_setting and
// reply_constraints) is the one used by the Pro handler — confirm the naming.
type minimaxChatCompletionV2Request struct {
	Model             string                  `json:"model"`
	Stream            bool                    `json:"stream,omitempty"`
	TokensToGenerate  int64                   `json:"tokens_to_generate,omitempty"`
	Temperature       float64                 `json:"temperature,omitempty"`
	TopP              float64                 `json:"top_p,omitempty"`
	MaskSensitiveInfo bool                    `json:"mask_sensitive_info"` // whether to mask private information; upstream default is true
	Messages          []minimaxMessage        `json:"messages"`
	BotSettings       []minimaxBotSetting     `json:"bot_setting"`
	ReplyConstraints  minimaxReplyConstraints `json:"reply_constraints"`
}

// minimaxMessage is a single message in the conversation.
type minimaxMessage struct {
	SenderType string `json:"sender_type"`
	SenderName string `json:"sender_name"`
	Text       string `json:"text"`
}

// minimaxBotSetting configures the bot persona.
type minimaxBotSetting struct {
	BotName string `json:"bot_name"`
	Content string `json:"content"`
}

// minimaxReplyConstraints describes the required form of the model reply.
type minimaxReplyConstraints struct {
	SenderType string `json:"sender_type"`
	SenderName string `json:"sender_name"`
}
|
||||
|
||||
// minimaxChatCompletionV2Resp is the minimax Chat Completion response schema.
type minimaxChatCompletionV2Resp struct {
	Created             int64           `json:"created"`
	Model               string          `json:"model"`
	Reply               string          `json:"reply"`
	InputSensitive      bool            `json:"input_sensitive,omitempty"`
	InputSensitiveType  int64           `json:"input_sensitive_type,omitempty"`
	OutputSensitive     bool            `json:"output_sensitive,omitempty"`
	OutputSensitiveType int64           `json:"output_sensitive_type,omitempty"`
	Choices             []minimaxChoice `json:"choices,omitempty"`
	Usage               minimaxUsage    `json:"usage,omitempty"`
	Id                  string          `json:"id"`
	BaseResp            minimaxBaseResp `json:"base_resp"`
}

// minimaxBaseResp carries the upstream status code and error detail.
type minimaxBaseResp struct {
	StatusCode int64  `json:"status_code"`
	StatusMsg  string `json:"status_msg"`
}

// minimaxChoice is a single result option; each choice may carry several messages.
type minimaxChoice struct {
	Messages     []minimaxMessage `json:"messages"`
	Index        int64            `json:"index"`
	FinishReason string           `json:"finish_reason"`
}

// minimaxUsage reports token consumption.
type minimaxUsage struct {
	TotalTokens int64 `json:"total_tokens"`
}
|
||||
|
||||
func (m *minimaxProvider) parseModel(body []byte) (string, error) {
|
||||
var tempMap map[string]interface{}
|
||||
if err := json.Unmarshal(body, &tempMap); err != nil {
|
||||
return "", err
|
||||
}
|
||||
model, ok := tempMap["model"].(string)
|
||||
if !ok {
|
||||
return "", errors.New("missing model in chat completion request")
|
||||
}
|
||||
return model, nil
|
||||
}
|
||||
|
||||
func (m *minimaxProvider) setBotSettings(request *minimaxChatCompletionV2Request, botSettingContent string) {
|
||||
if len(request.BotSettings) == 0 {
|
||||
request.BotSettings = []minimaxBotSetting{
|
||||
{
|
||||
BotName: defaultBotName,
|
||||
Content: func() string {
|
||||
if botSettingContent != "" {
|
||||
return botSettingContent
|
||||
}
|
||||
return defaultBotSettingContent
|
||||
}(),
|
||||
},
|
||||
}
|
||||
} else if botSettingContent != "" {
|
||||
newSetting := minimaxBotSetting{
|
||||
BotName: request.BotSettings[0].BotName,
|
||||
Content: botSettingContent,
|
||||
}
|
||||
request.BotSettings = append([]minimaxBotSetting{newSetting}, request.BotSettings...)
|
||||
}
|
||||
}
|
||||
|
||||
// buildMinimaxChatCompletionV2Request converts an OpenAI-style request into
// the minimax Pro request schema: system messages become bot settings, user
// and assistant messages become USER/BOT messages, and the reply constraints
// reuse the name of the last system message (or the default bot name).
// botSettingContent, when non-empty, is injected via setBotSettings.
func (m *minimaxProvider) buildMinimaxChatCompletionV2Request(request *chatCompletionRequest, botSettingContent string) *minimaxChatCompletionV2Request {
	var messages []minimaxMessage
	var botSetting []minimaxBotSetting
	var botName string

	// determineName returns name when set, otherwise defaultName.
	determineName := func(name string, defaultName string) string {
		if name != "" {
			return name
		}
		return defaultName
	}

	for _, message := range request.Messages {
		switch message.Role {
		case roleSystem:
			// botName keeps the last system message's name for reply constraints.
			botName = determineName(message.Name, defaultBotName)
			botSetting = append(botSetting, minimaxBotSetting{
				BotName: botName,
				Content: message.Content,
			})
		case roleAssistant:
			messages = append(messages, minimaxMessage{
				SenderType: senderTypeBot,
				SenderName: determineName(message.Name, defaultBotName),
				Text:       message.Content,
			})
		case roleUser:
			messages = append(messages, minimaxMessage{
				SenderType: senderTypeUser,
				SenderName: determineName(message.Name, defaultSenderName),
				Text:       message.Content,
			})
		}
		// Messages with any other role are silently dropped.
	}

	replyConstraints := minimaxReplyConstraints{
		SenderType: senderTypeBot,
		SenderName: determineName(botName, defaultBotName),
	}
	result := &minimaxChatCompletionV2Request{
		Model:             request.Model,
		Stream:            request.Stream,
		TokensToGenerate:  int64(request.MaxTokens),
		Temperature:       request.Temperature,
		TopP:              request.TopP,
		MaskSensitiveInfo: true,
		Messages:          messages,
		BotSettings:       botSetting,
		ReplyConstraints:  replyConstraints,
	}

	// Ensure at least one bot setting exists and inject the loaded context.
	m.setBotSettings(result, botSettingContent)
	return result
}
|
||||
|
||||
// responseV2ToOpenAI flattens a minimax response — where each choice may carry
// several messages — into OpenAI chat-completion choices, one choice per
// message, numbering them sequentially across all source choices.
func (m *minimaxProvider) responseV2ToOpenAI(response *minimaxChatCompletionV2Resp) *chatCompletionResponse {
	var choices []chatCompletionChoice
	messageIndex := 0
	for _, choice := range response.Choices {
		for _, message := range choice.Messages {
			// Deliberately shadows the loop variable: this is the converted
			// OpenAI message built from the minimax one.
			message := &chatMessage{
				Name:    message.SenderName,
				Role:    roleAssistant,
				Content: message.Text,
			}
			choices = append(choices, chatCompletionChoice{
				FinishReason: choice.FinishReason,
				Index:        messageIndex,
				Message:      message,
			})
			messageIndex++
		}
	}
	return &chatCompletionResponse{
		Id:      response.Id,
		Object:  objectChatCompletion,
		Created: response.Created,
		Model:   response.Model,
		Choices: choices,
		// minimax only reports total tokens; prompt/completion stay zero.
		Usage: usage{
			TotalTokens: int(response.Usage.TotalTokens),
		},
	}
}
|
||||
|
||||
func (m *minimaxProvider) appendResponse(responseBuilder *strings.Builder, responseBody string) {
|
||||
responseBuilder.WriteString(fmt.Sprintf("%s %s\n\n", streamDataItemKey, responseBody))
|
||||
}
|
||||
@@ -30,6 +30,7 @@ type chatCompletionRequest struct {
|
||||
Tools []tool `json:"tools,omitempty"`
|
||||
ToolChoice *toolChoice `json:"tool_choice,omitempty"`
|
||||
User string `json:"user,omitempty"`
|
||||
Stop []string `json:"stop,omitempty"`
|
||||
}
|
||||
|
||||
type streamOptions struct {
|
||||
@@ -59,7 +60,7 @@ type chatCompletionResponse struct {
|
||||
Model string `json:"model,omitempty"`
|
||||
SystemFingerprint string `json:"system_fingerprint,omitempty"`
|
||||
Object string `json:"object,omitempty"`
|
||||
Usage chatCompletionUsage `json:"usage,omitempty"`
|
||||
Usage usage `json:"usage,omitempty"`
|
||||
}
|
||||
|
||||
type chatCompletionChoice struct {
|
||||
@@ -69,7 +70,7 @@ type chatCompletionChoice struct {
|
||||
FinishReason string `json:"finish_reason,omitempty"`
|
||||
}
|
||||
|
||||
type chatCompletionUsage struct {
|
||||
type usage struct {
|
||||
PromptTokens int `json:"prompt_tokens,omitempty"`
|
||||
CompletionTokens int `json:"completion_tokens,omitempty"`
|
||||
TotalTokens int `json:"total_tokens,omitempty"`
|
||||
@@ -139,3 +140,43 @@ func (e *streamEvent) setValue(key, value string) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// embeddingsRequest mirrors the OpenAI embeddings request. Input may be a
// single string or an array of strings; use ParseInput to normalize it.
type embeddingsRequest struct {
	Input          interface{} `json:"input"`
	Model          string      `json:"model"`
	EncodingFormat string      `json:"encoding_format,omitempty"`
	Dimensions     int         `json:"dimensions,omitempty"`
	User           string      `json:"user,omitempty"`
}
|
||||
|
||||
// embeddingsResponse mirrors the OpenAI embeddings response envelope.
type embeddingsResponse struct {
	Object string      `json:"object"`
	Data   []embedding `json:"data"`
	Model  string      `json:"model"`
	Usage  usage       `json:"usage"`
}

// embedding is a single embedding vector together with its position in the
// input list.
type embedding struct {
	Object    string    `json:"object"`
	Index     int       `json:"index"`
	Embedding []float64 `json:"embedding"`
}
|
||||
|
||||
func (r embeddingsRequest) ParseInput() []string {
|
||||
if r.Input == nil {
|
||||
return nil
|
||||
}
|
||||
var input []string
|
||||
switch r.Input.(type) {
|
||||
case string:
|
||||
input = []string{r.Input.(string)}
|
||||
case []any:
|
||||
input = make([]string, 0, len(r.Input.([]any)))
|
||||
for _, item := range r.Input.([]any) {
|
||||
if str, ok := item.(string); ok {
|
||||
input = append(input, str)
|
||||
}
|
||||
}
|
||||
}
|
||||
return input
|
||||
}
|
||||
|
||||
@@ -26,6 +26,9 @@ func (m *moonshotProviderInitializer) ValidateConfig(config ProviderConfig) erro
|
||||
if config.moonshotFileId != "" && config.context != nil {
|
||||
return errors.New("moonshotFileId and context cannot be configured at the same time")
|
||||
}
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -57,7 +60,7 @@ func (m *moonshotProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName Api
|
||||
}
|
||||
_ = util.OverwriteRequestPath(moonshotChatCompletionPath)
|
||||
_ = util.OverwriteRequestHost(moonshotDomain)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Authorization", "Bearer "+m.config.GetRandomToken())
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + m.config.GetRandomToken())
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
@@ -92,12 +95,12 @@ func (m *moonshotProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiNam
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.moonshot.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
return
|
||||
}
|
||||
err = m.performChatCompletion(ctx, content, request, log)
|
||||
if err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to perform chat completion: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.moonshot.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to perform chat completion: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
|
||||
@@ -68,7 +68,7 @@ func (m *ollamaProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName,
|
||||
if m.config.modelMapping == nil && m.contextCache == nil {
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
@@ -83,7 +83,7 @@ func (m *ollamaProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName,
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
request.Model = mappedModel
|
||||
|
||||
|
||||
if m.contextCache != nil {
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
@@ -91,11 +91,11 @@ func (m *ollamaProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName,
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.ollama.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.ollama.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
@@ -105,7 +105,7 @@ func (m *ollamaProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName,
|
||||
}
|
||||
} else {
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.ollama.transform_body_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
|
||||
@@ -2,6 +2,7 @@ package provider
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
@@ -12,8 +13,9 @@ import (
|
||||
// openaiProvider is the provider for OpenAI service.
|
||||
|
||||
const (
|
||||
openaiDomain = "api.openai.com"
|
||||
openaiChatCompletionPath = "/v1/chat/completions"
|
||||
defaultOpenaiDomain = "api.openai.com"
|
||||
defaultOpenaiChatCompletionPath = "/v1/chat/completions"
|
||||
defaultOpenaiEmbeddingsPath = "/v1/chat/embeddings"
|
||||
)
|
||||
|
||||
type openaiProviderInitializer struct {
|
||||
@@ -24,14 +26,29 @@ func (m *openaiProviderInitializer) ValidateConfig(config ProviderConfig) error
|
||||
}
|
||||
|
||||
func (m *openaiProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
|
||||
if config.openaiCustomUrl == "" {
|
||||
return &openaiProvider{
|
||||
config: config,
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
customUrl := strings.TrimPrefix(strings.TrimPrefix(config.openaiCustomUrl, "http://"), "https://")
|
||||
pairs := strings.SplitN(customUrl, "/", 2)
|
||||
if len(pairs) != 2 {
|
||||
return nil, fmt.Errorf("invalid openaiCustomUrl:%s", config.openaiCustomUrl)
|
||||
}
|
||||
return &openaiProvider{
|
||||
config: config,
|
||||
customDomain: pairs[0],
|
||||
customPath: "/" + pairs[1],
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
|
||||
type openaiProvider struct {
|
||||
config ProviderConfig
|
||||
customDomain string
|
||||
customPath string
|
||||
contextCache *contextCache
|
||||
}
|
||||
|
||||
@@ -40,44 +57,63 @@ func (m *openaiProvider) GetProviderType() string {
|
||||
}
|
||||
|
||||
func (m *openaiProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
_ = util.OverwriteRequestPath(openaiChatCompletionPath)
|
||||
_ = util.OverwriteRequestHost(openaiDomain)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Authorization", "Bearer "+m.config.GetRandomToken())
|
||||
|
||||
if m.contextCache == nil {
|
||||
ctx.DontReadRequestBody()
|
||||
if m.customPath == "" {
|
||||
switch apiName {
|
||||
case ApiNameChatCompletion:
|
||||
_ = util.OverwriteRequestPath(defaultOpenaiChatCompletionPath)
|
||||
case ApiNameEmbeddings:
|
||||
ctx.DontReadRequestBody()
|
||||
_ = util.OverwriteRequestPath(defaultOpenaiEmbeddingsPath)
|
||||
}
|
||||
} else {
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
_ = util.OverwriteRequestPath(m.customPath)
|
||||
}
|
||||
|
||||
if m.customDomain == "" {
|
||||
_ = util.OverwriteRequestHost(defaultOpenaiDomain)
|
||||
} else {
|
||||
_ = util.OverwriteRequestHost(m.customDomain)
|
||||
}
|
||||
if len(m.config.apiTokens) > 0 {
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + m.config.GetRandomToken())
|
||||
}
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (m *openaiProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
if m.contextCache == nil {
|
||||
// We don't need to process the request body for other APIs.
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
if request.Stream {
|
||||
// For stream requests, we need to include usage in the response.
|
||||
if request.StreamOptions == nil {
|
||||
request.StreamOptions = &streamOptions{IncludeUsage: true}
|
||||
} else if !request.StreamOptions.IncludeUsage {
|
||||
request.StreamOptions.IncludeUsage = true
|
||||
}
|
||||
}
|
||||
if m.contextCache == nil {
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.openai.set_include_usage_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.openai.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.openai.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
|
||||
@@ -3,6 +3,7 @@ package provider
|
||||
import (
|
||||
"errors"
|
||||
"math/rand"
|
||||
"strings"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
@@ -14,24 +15,39 @@ type Pointcut string
|
||||
|
||||
const (
|
||||
ApiNameChatCompletion ApiName = "chatCompletion"
|
||||
ApiNameEmbeddings ApiName = "embeddings"
|
||||
|
||||
providerTypeMoonshot = "moonshot"
|
||||
providerTypeAzure = "azure"
|
||||
providerTypeQwen = "qwen"
|
||||
providerTypeOpenAI = "openai"
|
||||
providerTypeGroq = "groq"
|
||||
providerTypeBaichuan = "baichuan"
|
||||
providerTypeYi = "yi"
|
||||
providerTypeDeepSeek = "deepseek"
|
||||
providerTypeZhipuAi = "zhipuai"
|
||||
providerTypeOllama = "ollama"
|
||||
providerTypeMoonshot = "moonshot"
|
||||
providerTypeAzure = "azure"
|
||||
providerTypeQwen = "qwen"
|
||||
providerTypeOpenAI = "openai"
|
||||
providerTypeGroq = "groq"
|
||||
providerTypeBaichuan = "baichuan"
|
||||
providerTypeYi = "yi"
|
||||
providerTypeDeepSeek = "deepseek"
|
||||
providerTypeZhipuAi = "zhipuai"
|
||||
providerTypeOllama = "ollama"
|
||||
providerTypeClaude = "claude"
|
||||
providerTypeBaidu = "baidu"
|
||||
providerTypeHunyuan = "hunyuan"
|
||||
providerTypeStepfun = "stepfun"
|
||||
providerTypeMinimax = "minimax"
|
||||
providerTypeCloudflare = "cloudflare"
|
||||
providerTypeSpark = "spark"
|
||||
providerTypeGemini = "gemini"
|
||||
|
||||
protocolOpenAI = "openai"
|
||||
protocolOriginal = "original"
|
||||
|
||||
roleSystem = "system"
|
||||
roleSystem = "system"
|
||||
roleAssistant = "assistant"
|
||||
roleUser = "user"
|
||||
|
||||
finishReasonStop = "stop"
|
||||
finishReasonLength = "length"
|
||||
|
||||
ctxKeyIncrementalStreaming = "incrementalStreaming"
|
||||
ctxKeyApiName = "apiKey"
|
||||
ctxKeyStreamingBody = "streamingBody"
|
||||
ctxKeyOriginalRequestModel = "originalRequestModel"
|
||||
ctxKeyFinalRequestModel = "finalRequestModel"
|
||||
@@ -54,16 +70,24 @@ var (
|
||||
errUnsupportedApiName = errors.New("unsupported API name")
|
||||
|
||||
providerInitializers = map[string]providerInitializer{
|
||||
providerTypeMoonshot: &moonshotProviderInitializer{},
|
||||
providerTypeAzure: &azureProviderInitializer{},
|
||||
providerTypeQwen: &qwenProviderInitializer{},
|
||||
providerTypeOpenAI: &openaiProviderInitializer{},
|
||||
providerTypeGroq: &groqProviderInitializer{},
|
||||
providerTypeBaichuan: &baichuanProviderInitializer{},
|
||||
providerTypeYi: &yiProviderInitializer{},
|
||||
providerTypeDeepSeek: &deepseekProviderInitializer{},
|
||||
providerTypeZhipuAi: &zhipuAiProviderInitializer{},
|
||||
providerTypeOllama: &ollamaProviderInitializer{},
|
||||
providerTypeMoonshot: &moonshotProviderInitializer{},
|
||||
providerTypeAzure: &azureProviderInitializer{},
|
||||
providerTypeQwen: &qwenProviderInitializer{},
|
||||
providerTypeOpenAI: &openaiProviderInitializer{},
|
||||
providerTypeGroq: &groqProviderInitializer{},
|
||||
providerTypeBaichuan: &baichuanProviderInitializer{},
|
||||
providerTypeYi: &yiProviderInitializer{},
|
||||
providerTypeDeepSeek: &deepseekProviderInitializer{},
|
||||
providerTypeZhipuAi: &zhipuAiProviderInitializer{},
|
||||
providerTypeOllama: &ollamaProviderInitializer{},
|
||||
providerTypeClaude: &claudeProviderInitializer{},
|
||||
providerTypeBaidu: &baiduProviderInitializer{},
|
||||
providerTypeHunyuan: &hunyuanProviderInitializer{},
|
||||
providerTypeStepfun: &stepfunProviderInitializer{},
|
||||
providerTypeMinimax: &minimaxProviderInitializer{},
|
||||
providerTypeCloudflare: &cloudflareProviderInitializer{},
|
||||
providerTypeSpark: &sparkProviderInitializer{},
|
||||
providerTypeGemini: &geminiProviderInitializer{},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -93,7 +117,7 @@ type ResponseBodyHandler interface {
|
||||
|
||||
type ProviderConfig struct {
|
||||
// @Title zh-CN AI服务提供商
|
||||
// @Description zh-CN AI服务提供商类型,目前支持的取值为:"moonshot"、"qwen"、"openai"、"azure"、"baichuan"、"yi"、"zhipuai"、"ollama"
|
||||
// @Description zh-CN AI服务提供商类型
|
||||
typ string `required:"true" yaml:"type" json:"type"`
|
||||
// @Title zh-CN API Tokens
|
||||
// @Description zh-CN 在请求AI服务时用于认证的API Token列表。不同的AI服务提供商可能有不同的名称。部分供应商只支持配置一个API Token(如Azure OpenAI)。
|
||||
@@ -101,6 +125,9 @@ type ProviderConfig struct {
|
||||
// @Title zh-CN 请求超时
|
||||
// @Description zh-CN 请求AI服务的超时时间,单位为毫秒。默认值为120000,即2分钟
|
||||
timeout uint32 `required:"false" yaml:"timeout" json:"timeout"`
|
||||
// @Title zh-CN 基于OpenAI协议的自定义后端URL
|
||||
// @Description zh-CN 仅适用于支持 openai 协议的服务。
|
||||
openaiCustomUrl string `required:"false" yaml:"openaiCustomUrl" json:"openaiCustomUrl"`
|
||||
// @Title zh-CN Moonshot File ID
|
||||
// @Description zh-CN 仅适用于Moonshot AI服务。Moonshot AI服务的文件ID,其内容用于补充AI请求上下文
|
||||
moonshotFileId string `required:"false" yaml:"moonshotFileId" json:"moonshotFileId"`
|
||||
@@ -119,6 +146,15 @@ type ProviderConfig struct {
|
||||
// @Title zh-CN Ollama Server Port
|
||||
// @Description zh-CN 仅适用于 Ollama 服务。Ollama 服务器的端口号。
|
||||
ollamaServerPort uint32 `required:"false" yaml:"ollamaServerPort" json:"ollamaServerPort"`
|
||||
// @Title zh-CN hunyuan api key for authorization
|
||||
// @Description zh-CN 仅适用于Hun Yuan AI服务鉴权,API key/id 参考:https://cloud.tencent.com/document/api/1729/101843#Golang
|
||||
hunyuanAuthKey string `required:"false" yaml:"hunyuanAuthKey" json:"hunyuanAuthKey"`
|
||||
// @Title zh-CN hunyuan api id for authorization
|
||||
// @Description zh-CN 仅适用于Hun Yuan AI服务鉴权
|
||||
hunyuanAuthId string `required:"false" yaml:"hunyuanAuthId" json:"hunyuanAuthId"`
|
||||
// @Title zh-CN minimax group id
|
||||
// @Description zh-CN 仅适用于minimax使用ChatCompletion Pro接口的模型
|
||||
minimaxGroupId string `required:"false" yaml:"minimaxGroupId" json:"minimaxGroupId"`
|
||||
// @Title zh-CN 模型名称映射表
|
||||
// @Description zh-CN 用于将请求中的模型名称映射为目标AI服务商支持的模型名称。支持通过“*”来配置全局映射
|
||||
modelMapping map[string]string `required:"false" yaml:"modelMapping" json:"modelMapping"`
|
||||
@@ -128,6 +164,15 @@ type ProviderConfig struct {
|
||||
// @Title zh-CN 模型对话上下文
|
||||
// @Description zh-CN 配置一个外部获取对话上下文的文件来源,用于在AI请求中补充对话上下文
|
||||
context *ContextConfig `required:"false" yaml:"context" json:"context"`
|
||||
// @Title zh-CN 版本
|
||||
// @Description zh-CN 请求AI服务的版本,目前仅适用于Claude AI服务
|
||||
claudeVersion string `required:"false" yaml:"version" json:"version"`
|
||||
// @Title zh-CN Cloudflare Account ID
|
||||
// @Description zh-CN 仅适用于 Cloudflare Workers AI 服务。参考:https://developers.cloudflare.com/workers-ai/get-started/rest-api/#2-run-a-model-via-api
|
||||
cloudflareAccountId string `required:"false" yaml:"cloudflareAccountId" json:"cloudflareAccountId"`
|
||||
// @Title zh-CN Gemini AI内容过滤和安全级别设定
|
||||
// @Description zh-CN 仅适用于 Gemini AI 服务。参考:https://ai.google.dev/gemini-api/docs/safety-settings
|
||||
geminiSafetySetting map[string]string `required:"false" yaml:"geminiSafetySetting" json:"geminiSafetySetting"`
|
||||
}
|
||||
|
||||
func (c *ProviderConfig) FromJson(json gjson.Result) {
|
||||
@@ -140,6 +185,7 @@ func (c *ProviderConfig) FromJson(json gjson.Result) {
|
||||
if c.timeout == 0 {
|
||||
c.timeout = defaultTimeout
|
||||
}
|
||||
c.openaiCustomUrl = json.Get("openaiCustomUrl").String()
|
||||
c.moonshotFileId = json.Get("moonshotFileId").String()
|
||||
c.azureServiceUrl = json.Get("azureServiceUrl").String()
|
||||
c.qwenFileIds = make([]string, 0)
|
||||
@@ -162,12 +208,20 @@ func (c *ProviderConfig) FromJson(json gjson.Result) {
|
||||
c.context = &ContextConfig{}
|
||||
c.context.FromJson(contextJson)
|
||||
}
|
||||
c.claudeVersion = json.Get("claudeVersion").String()
|
||||
c.hunyuanAuthId = json.Get("hunyuanAuthId").String()
|
||||
c.hunyuanAuthKey = json.Get("hunyuanAuthKey").String()
|
||||
c.minimaxGroupId = json.Get("minimaxGroupId").String()
|
||||
c.cloudflareAccountId = json.Get("cloudflareAccountId").String()
|
||||
if c.typ == providerTypeGemini {
|
||||
c.geminiSafetySetting = make(map[string]string)
|
||||
for k, v := range json.Get("geminiSafetySetting").Map() {
|
||||
c.geminiSafetySetting[k] = v.String()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *ProviderConfig) Validate() error {
|
||||
if c.apiTokens == nil || len(c.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
if c.timeout < 0 {
|
||||
return errors.New("invalid timeout in config")
|
||||
}
|
||||
@@ -182,7 +236,6 @@ func (c *ProviderConfig) Validate() error {
|
||||
|
||||
if c.typ == "" {
|
||||
return errors.New("missing type in provider config")
|
||||
|
||||
}
|
||||
initializer, has := providerInitializers[c.typ]
|
||||
if !has {
|
||||
@@ -216,16 +269,38 @@ func CreateProvider(pc ProviderConfig) (Provider, error) {
|
||||
}
|
||||
|
||||
func getMappedModel(model string, modelMapping map[string]string, log wrapper.Log) string {
|
||||
if modelMapping == nil || len(modelMapping) == 0 {
|
||||
return model
|
||||
}
|
||||
if v, ok := modelMapping[model]; ok && len(v) != 0 {
|
||||
log.Debugf("model %s is mapped to %s explictly", model, v)
|
||||
return v
|
||||
}
|
||||
if v, ok := modelMapping[wildcard]; ok {
|
||||
log.Debugf("model %s is mapped to %s via wildcard", model, v)
|
||||
return v
|
||||
mappedModel := doGetMappedModel(model, modelMapping, log)
|
||||
if len(mappedModel) != 0 {
|
||||
return mappedModel
|
||||
}
|
||||
return model
|
||||
}
|
||||
|
||||
func doGetMappedModel(model string, modelMapping map[string]string, log wrapper.Log) string {
|
||||
if modelMapping == nil || len(modelMapping) == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
if v, ok := modelMapping[model]; ok {
|
||||
log.Debugf("model [%s] is mapped to [%s] explictly", model, v)
|
||||
return v
|
||||
}
|
||||
|
||||
for k, v := range modelMapping {
|
||||
if k == wildcard || !strings.HasSuffix(k, wildcard) {
|
||||
continue
|
||||
}
|
||||
k = strings.TrimSuffix(k, wildcard)
|
||||
if strings.HasPrefix(model, k) {
|
||||
log.Debugf("model [%s] is mapped to [%s] via prefix [%s]", model, v, k)
|
||||
return v
|
||||
}
|
||||
}
|
||||
|
||||
if v, ok := modelMapping[wildcard]; ok {
|
||||
log.Debugf("model [%s] is mapped to [%s] via wildcard", model, v)
|
||||
return v
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -21,6 +22,7 @@ const (
|
||||
|
||||
qwenDomain = "dashscope.aliyuncs.com"
|
||||
qwenChatCompletionPath = "/api/v1/services/aigc/text-generation/generation"
|
||||
qwenTextEmbeddingPath = "/api/v1/services/embeddings/text-embedding/text-embedding"
|
||||
|
||||
qwenTopPMin = 0.000001
|
||||
qwenTopPMax = 0.999999
|
||||
@@ -37,6 +39,9 @@ func (m *qwenProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if len(config.qwenFileIds) != 0 && config.context != nil {
|
||||
return errors.New("qwenFileIds and context cannot be configured at the same time")
|
||||
}
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -58,15 +63,17 @@ func (m *qwenProvider) GetProviderType() string {
|
||||
}
|
||||
|
||||
func (m *qwenProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
if apiName == ApiNameChatCompletion {
|
||||
_ = util.OverwriteRequestPath(qwenChatCompletionPath)
|
||||
} else if apiName == ApiNameEmbeddings {
|
||||
_ = util.OverwriteRequestPath(qwenTextEmbeddingPath)
|
||||
} else {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
_ = util.OverwriteRequestPath(qwenChatCompletionPath)
|
||||
_ = util.OverwriteRequestHost(qwenDomain)
|
||||
_ = proxywasm.ReplaceHttpRequestHeader("Authorization", "Bearer "+m.config.GetRandomToken())
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + m.config.GetRandomToken())
|
||||
|
||||
if m.config.protocol == protocolOriginal && m.config.context == nil {
|
||||
ctx.DontReadRequestBody()
|
||||
if m.config.protocol == protocolOriginal {
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
@@ -78,10 +85,16 @@ func (m *qwenProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName
|
||||
}
|
||||
|
||||
func (m *qwenProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
if apiName == ApiNameChatCompletion {
|
||||
return m.onChatCompletionRequestBody(ctx, body, log)
|
||||
}
|
||||
if apiName == ApiNameEmbeddings {
|
||||
return m.onEmbeddingsRequestBody(ctx, body, log)
|
||||
}
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
func (m *qwenProvider) onChatCompletionRequestBody(ctx wrapper.HttpContext, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if m.config.protocol == protocolOriginal {
|
||||
if m.config.context == nil {
|
||||
return types.ActionContinue, nil
|
||||
@@ -99,11 +112,11 @@ func (m *qwenProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, b
|
||||
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.qwen.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
m.insertContextMessage(request, content, false)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.qwen.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
@@ -152,7 +165,7 @@ func (m *qwenProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, b
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.qwen.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
qwenRequest := m.buildQwenTextGenerationRequest(request, streaming)
|
||||
@@ -160,7 +173,7 @@ func (m *qwenProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, b
|
||||
ctx.SetContext(ctxKeyIncrementalStreaming, qwenRequest.Parameters.IncrementalOutput)
|
||||
}
|
||||
if err := replaceJsonRequestBody(qwenRequest, log); err != nil {
|
||||
_ = util.SendResponse(500, util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
_ = util.SendResponse(500, "ai-proxy.qwen.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
@@ -169,6 +182,33 @@ func (m *qwenProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, b
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
|
||||
func (m *qwenProvider) onEmbeddingsRequestBody(ctx wrapper.HttpContext, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
request := &embeddingsRequest{}
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
|
||||
log.Debugf("=== embeddings request: %v", request)
|
||||
|
||||
model := request.Model
|
||||
if model == "" {
|
||||
return types.ActionContinue, errors.New("missing model in the request")
|
||||
}
|
||||
ctx.SetContext(ctxKeyOriginalRequestModel, model)
|
||||
mappedModel := getMappedModel(model, m.config.modelMapping, log)
|
||||
if mappedModel == "" {
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
request.Model = mappedModel
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, request.Model)
|
||||
|
||||
if qwenRequest, err := m.buildQwenTextEmbeddingRequest(request); err == nil {
|
||||
return types.ActionContinue, replaceJsonRequestBody(qwenRequest, log)
|
||||
} else {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
}
|
||||
|
||||
func (m *qwenProvider) OnResponseHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if m.config.protocol == protocolOriginal {
|
||||
ctx.DontReadResponseBody()
|
||||
@@ -180,15 +220,16 @@ func (m *qwenProvider) OnResponseHeaders(ctx wrapper.HttpContext, apiName ApiNam
|
||||
}
|
||||
|
||||
func (m *qwenProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name ApiName, chunk []byte, isLastChunk bool, log wrapper.Log) ([]byte, error) {
|
||||
if name != ApiNameChatCompletion {
|
||||
return chunk, nil
|
||||
}
|
||||
|
||||
receivedBody := chunk
|
||||
if bufferedStreamingBody, has := ctx.GetContext(ctxKeyStreamingBody).([]byte); has {
|
||||
receivedBody = append(bufferedStreamingBody, chunk...)
|
||||
}
|
||||
|
||||
incrementalStreaming, err := ctx.GetContext(ctxKeyIncrementalStreaming).(bool)
|
||||
if !err {
|
||||
incrementalStreaming = false
|
||||
}
|
||||
incrementalStreaming := ctx.GetBoolContext(ctxKeyIncrementalStreaming, false)
|
||||
|
||||
eventStartIndex, lineStartIndex, valueStartIndex := -1, -1, -1
|
||||
|
||||
@@ -264,6 +305,16 @@ func (m *qwenProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name Api
|
||||
}
|
||||
|
||||
func (m *qwenProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName == ApiNameChatCompletion {
|
||||
return m.onChatCompletionResponseBody(ctx, body, log)
|
||||
}
|
||||
if apiName == ApiNameEmbeddings {
|
||||
return m.onEmbeddingsResponseBody(ctx, body, log)
|
||||
}
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
|
||||
func (m *qwenProvider) onChatCompletionResponseBody(ctx wrapper.HttpContext, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
qwenResponse := &qwenTextGenResponse{}
|
||||
if err := json.Unmarshal(body, qwenResponse); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal Qwen response: %v", err)
|
||||
@@ -272,6 +323,15 @@ func (m *qwenProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName,
|
||||
return types.ActionContinue, replaceJsonResponseBody(response, log)
|
||||
}
|
||||
|
||||
func (m *qwenProvider) onEmbeddingsResponseBody(ctx wrapper.HttpContext, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
qwenResponse := &qwenTextEmbeddingResponse{}
|
||||
if err := json.Unmarshal(body, qwenResponse); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal Qwen response: %v", err)
|
||||
}
|
||||
response := m.buildEmbeddingsResponse(ctx, qwenResponse)
|
||||
return types.ActionContinue, replaceJsonResponseBody(response, log)
|
||||
}
|
||||
|
||||
func (m *qwenProvider) buildQwenTextGenerationRequest(origRequest *chatCompletionRequest, streaming bool) *qwenTextGenRequest {
|
||||
messages := make([]qwenMessage, 0, len(origRequest.Messages))
|
||||
for i := range origRequest.Messages {
|
||||
@@ -324,11 +384,11 @@ func (m *qwenProvider) buildChatCompletionResponse(ctx wrapper.HttpContext, qwen
|
||||
return &chatCompletionResponse{
|
||||
Id: qwenResponse.RequestId,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetContext(ctxKeyFinalRequestModel).(string),
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
SystemFingerprint: "",
|
||||
Object: objectChatCompletion,
|
||||
Choices: choices,
|
||||
Usage: chatCompletionUsage{
|
||||
Usage: usage{
|
||||
PromptTokens: qwenResponse.Usage.InputTokens,
|
||||
CompletionTokens: qwenResponse.Usage.OutputTokens,
|
||||
TotalTokens: qwenResponse.Usage.TotalTokens,
|
||||
@@ -340,7 +400,7 @@ func (m *qwenProvider) buildChatCompletionStreamingResponse(ctx wrapper.HttpCont
|
||||
baseMessage := chatCompletionResponse{
|
||||
Id: qwenResponse.RequestId,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Model: ctx.GetContext(ctxKeyFinalRequestModel).(string),
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
Choices: make([]chatCompletionChoice, 0),
|
||||
SystemFingerprint: "",
|
||||
Object: objectChatCompletionChunk,
|
||||
@@ -349,11 +409,19 @@ func (m *qwenProvider) buildChatCompletionStreamingResponse(ctx wrapper.HttpCont
|
||||
responses := make([]*chatCompletionResponse, 0)
|
||||
|
||||
qwenChoice := qwenResponse.Output.Choices[0]
|
||||
// Yes, Qwen uses a string "null" as null.
|
||||
finished := qwenChoice.FinishReason != "" && qwenChoice.FinishReason != "null"
|
||||
message := qwenChoice.Message
|
||||
|
||||
deltaContentMessage := &chatMessage{Role: message.Role, Content: message.Content}
|
||||
deltaToolCallsMessage := &chatMessage{Role: message.Role, ToolCalls: append([]toolCall{}, message.ToolCalls...)}
|
||||
if !incrementalStreaming {
|
||||
for _, tc := range message.ToolCalls {
|
||||
if tc.Function.Arguments == "" && !finished {
|
||||
// We don't push any tool call until its arguments are available.
|
||||
return nil
|
||||
}
|
||||
}
|
||||
if pushedMessage, ok := ctx.GetContext(ctxKeyPushedMessage).(qwenMessage); ok {
|
||||
if message.Content == "" {
|
||||
message.Content = pushedMessage.Content
|
||||
@@ -386,13 +454,13 @@ func (m *qwenProvider) buildChatCompletionStreamingResponse(ctx wrapper.HttpCont
|
||||
responses = append(responses, &response)
|
||||
}
|
||||
|
||||
// Yes, Qwen uses a string "null" as null.
|
||||
if qwenChoice.FinishReason != "" && qwenChoice.FinishReason != "null" {
|
||||
if finished {
|
||||
finishResponse := *&baseMessage
|
||||
finishResponse.Choices = append(finishResponse.Choices, chatCompletionChoice{FinishReason: qwenChoice.FinishReason})
|
||||
finishResponse.Choices = append(finishResponse.Choices, chatCompletionChoice{Delta: &chatMessage{}, FinishReason: qwenChoice.FinishReason})
|
||||
|
||||
usageResponse := *&baseMessage
|
||||
usageResponse.Usage = chatCompletionUsage{
|
||||
usageResponse.Choices = []chatCompletionChoice{{Delta: &chatMessage{}}}
|
||||
usageResponse.Usage = usage{
|
||||
PromptTokens: qwenResponse.Usage.InputTokens,
|
||||
CompletionTokens: qwenResponse.Usage.OutputTokens,
|
||||
TotalTokens: qwenResponse.Usage.TotalTokens,
|
||||
@@ -477,6 +545,50 @@ func (m *qwenProvider) appendStreamEvent(responseBuilder *strings.Builder, event
|
||||
responseBuilder.WriteString("\n\n")
|
||||
}
|
||||
|
||||
func (m *qwenProvider) buildQwenTextEmbeddingRequest(request *embeddingsRequest) (*qwenTextEmbeddingRequest, error) {
|
||||
var texts []string
|
||||
if str, isString := request.Input.(string); isString {
|
||||
texts = []string{str}
|
||||
} else if strs, isArray := request.Input.([]interface{}); isArray {
|
||||
texts = make([]string, 0, len(strs))
|
||||
for _, item := range strs {
|
||||
if str, isString := item.(string); isString {
|
||||
texts = append(texts, str)
|
||||
} else {
|
||||
return nil, errors.New("unsupported input type in array: " + reflect.TypeOf(item).String())
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return nil, errors.New("unsupported input type: " + reflect.TypeOf(request.Input).String())
|
||||
}
|
||||
return &qwenTextEmbeddingRequest{
|
||||
Model: request.Model,
|
||||
Input: qwenTextEmbeddingInput{
|
||||
Texts: texts,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (m *qwenProvider) buildEmbeddingsResponse(ctx wrapper.HttpContext, qwenResponse *qwenTextEmbeddingResponse) *embeddingsResponse {
|
||||
data := make([]embedding, 0, len(qwenResponse.Output.Embeddings))
|
||||
for _, qwenEmbedding := range qwenResponse.Output.Embeddings {
|
||||
data = append(data, embedding{
|
||||
Object: "embedding",
|
||||
Index: qwenEmbedding.TextIndex,
|
||||
Embedding: qwenEmbedding.Embedding,
|
||||
})
|
||||
}
|
||||
return &embeddingsResponse{
|
||||
Object: "list",
|
||||
Data: data,
|
||||
Model: ctx.GetContext(ctxKeyFinalRequestModel).(string),
|
||||
Usage: usage{
|
||||
PromptTokens: qwenResponse.Usage.TotalTokens,
|
||||
TotalTokens: qwenResponse.Usage.TotalTokens,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type qwenTextGenRequest struct {
|
||||
Model string `json:"model"`
|
||||
Input qwenTextGenInput `json:"input"`
|
||||
@@ -503,7 +615,7 @@ type qwenTextGenParameters struct {
|
||||
type qwenTextGenResponse struct {
|
||||
RequestId string `json:"request_id"`
|
||||
Output qwenTextGenOutput `json:"output"`
|
||||
Usage qwenTextGenUsage `json:"usage"`
|
||||
Usage qwenUsage `json:"usage"`
|
||||
}
|
||||
|
||||
type qwenTextGenOutput struct {
|
||||
@@ -516,7 +628,7 @@ type qwenTextGenChoice struct {
|
||||
Message qwenMessage `json:"message"`
|
||||
}
|
||||
|
||||
type qwenTextGenUsage struct {
|
||||
type qwenUsage struct {
|
||||
InputTokens int `json:"input_tokens"`
|
||||
OutputTokens int `json:"output_tokens"`
|
||||
TotalTokens int `json:"total_tokens"`
|
||||
@@ -529,6 +641,36 @@ type qwenMessage struct {
|
||||
ToolCalls []toolCall `json:"tool_calls,omitempty"`
|
||||
}
|
||||
|
||||
type qwenTextEmbeddingRequest struct {
|
||||
Model string `json:"model"`
|
||||
Input qwenTextEmbeddingInput `json:"input"`
|
||||
Parameters qwenTextEmbeddingParameters `json:"parameters,omitempty"`
|
||||
}
|
||||
|
||||
type qwenTextEmbeddingInput struct {
|
||||
Texts []string `json:"texts"`
|
||||
}
|
||||
|
||||
type qwenTextEmbeddingParameters struct {
|
||||
TextType string `json:"text_type,omitempty"`
|
||||
}
|
||||
|
||||
type qwenTextEmbeddingResponse struct {
|
||||
RequestId string `json:"request_id"`
|
||||
Output qwenTextEmbeddingOutput `json:"output"`
|
||||
Usage qwenUsage `json:"usage"`
|
||||
}
|
||||
|
||||
type qwenTextEmbeddingOutput struct {
|
||||
RequestId string `json:"request_id"`
|
||||
Embeddings []qwenTextEmbeddings `json:"embeddings"`
|
||||
}
|
||||
|
||||
type qwenTextEmbeddings struct {
|
||||
TextIndex int `json:"text_index"`
|
||||
Embedding []float64 `json:"embedding"`
|
||||
}
|
||||
|
||||
func qwenMessageToChatMessage(qwenMessage qwenMessage) chatMessage {
|
||||
return chatMessage{
|
||||
Name: qwenMessage.Name,
|
||||
|
||||
@@ -2,7 +2,6 @@ package provider
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
@@ -14,7 +13,7 @@ func decodeChatCompletionRequest(body []byte, request *chatCompletionRequest) er
|
||||
return fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
if request.Messages == nil || len(request.Messages) == 0 {
|
||||
return errors.New("no message found in the request body")
|
||||
return fmt.Errorf("no message found in the request body: %s", body)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
207
plugins/wasm-go/extensions/ai-proxy/provider/spark.go
Normal file
207
plugins/wasm-go/extensions/ai-proxy/provider/spark.go
Normal file
@@ -0,0 +1,207 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
)
|
||||
|
||||
// sparkProvider is the provider for SparkLLM AI service.
|
||||
const (
	// sparkHost is the upstream host of iFlytek's OpenAI-compatible Spark API.
	sparkHost = "spark-api-open.xf-yun.com"
	// sparkChatCompletionPath is the chat-completion endpoint path on sparkHost.
	sparkChatCompletionPath = "/v1/chat/completions"
)
|
||||
|
||||
// sparkProviderInitializer validates config and builds sparkProvider instances.
type sparkProviderInitializer struct {
}

// sparkProvider is the provider implementation for the iFlytek SparkLLM AI service.
type sparkProvider struct {
	config       ProviderConfig
	contextCache *contextCache
}
|
||||
|
||||
// sparkRequest is the chat-completion request body in Spark's native protocol.
// The field set mirrors the OpenAI schema that Spark's open API accepts.
type sparkRequest struct {
	Model       string        `json:"model"`
	Messages    []chatMessage `json:"messages"`
	MaxTokens   int           `json:"max_tokens,omitempty"`
	TopK        int           `json:"top_k,omitempty"`
	Stream      bool          `json:"stream,omitempty"`
	Temperature float64       `json:"temperature,omitempty"`
	Tools       []tool        `json:"tools,omitempty"`
	ToolChoice  string        `json:"tool_choice,omitempty"`
}

// sparkResponse is the non-streaming chat-completion response from Spark.
// Code is non-zero on error, with Message carrying the error description.
type sparkResponse struct {
	Code    int                    `json:"code"`
	Message string                 `json:"message"`
	Sid     string                 `json:"sid"`
	Choices []chatCompletionChoice `json:"choices"`
	Usage   usage                  `json:"usage,omitempty"`
}

// sparkStreamResponse is one SSE event payload of a streaming response; it
// extends sparkResponse with the event id and creation timestamp.
type sparkStreamResponse struct {
	sparkResponse
	Id      string `json:"id"`
	Created int64  `json:"created"`
}
|
||||
|
||||
// ValidateConfig accepts any configuration: the Spark provider has no
// mandatory settings of its own.
func (i *sparkProviderInitializer) ValidateConfig(config ProviderConfig) error {
	return nil
}
|
||||
|
||||
func (i *sparkProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
|
||||
return &sparkProvider{
|
||||
config: config,
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// GetProviderType returns the registered provider-type identifier for Spark.
func (p *sparkProvider) GetProviderType() string {
	return providerTypeSpark
}
|
||||
|
||||
func (p *sparkProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
_ = util.OverwriteRequestHost(sparkHost)
|
||||
_ = util.OverwriteRequestPath(sparkChatCompletionPath)
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + p.config.GetRandomToken())
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Accept-Encoding")
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (p *sparkProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
// 使用Spark协议
|
||||
if p.config.protocol == protocolOriginal {
|
||||
request := &sparkRequest{}
|
||||
if err := json.Unmarshal(body, request); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal request: %v", err)
|
||||
}
|
||||
if request.Model == "" {
|
||||
return types.ActionContinue, errors.New("request model is empty")
|
||||
}
|
||||
// 目前星火在模型名称错误时,也会调用generalv3,这里还是按照输入的模型名称设置响应里的模型名称
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, request.Model)
|
||||
return types.ActionContinue, replaceJsonRequestBody(request, log)
|
||||
} else {
|
||||
// 使用openai协议
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
if request.Model == "" {
|
||||
return types.ActionContinue, errors.New("missing model in chat completion request")
|
||||
}
|
||||
// 映射模型
|
||||
mappedModel := getMappedModel(request.Model, p.config.modelMapping, log)
|
||||
if mappedModel == "" {
|
||||
return types.ActionContinue, errors.New("model becomes empty after applying the configured mapping")
|
||||
}
|
||||
ctx.SetContext(ctxKeyFinalRequestModel, mappedModel)
|
||||
request.Model = mappedModel
|
||||
return types.ActionContinue, replaceJsonRequestBody(request, log)
|
||||
}
|
||||
}
|
||||
|
||||
// OnResponseHeaders strips Content-Length, since the response body is
// rewritten downstream and the original length no longer applies.
func (p *sparkProvider) OnResponseHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
	_ = proxywasm.RemoveHttpResponseHeader("Content-Length")
	return types.ActionContinue, nil
}
|
||||
|
||||
func (p *sparkProvider) OnResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
sparkResponse := &sparkResponse{}
|
||||
if err := json.Unmarshal(body, sparkResponse); err != nil {
|
||||
return types.ActionContinue, fmt.Errorf("unable to unmarshal spark response: %v", err)
|
||||
}
|
||||
if sparkResponse.Code != 0 {
|
||||
return types.ActionContinue, fmt.Errorf("spark response error, error_code: %d, error_message: %s", sparkResponse.Code, sparkResponse.Message)
|
||||
}
|
||||
response := p.responseSpark2OpenAI(ctx, sparkResponse)
|
||||
return types.ActionContinue, replaceJsonResponseBody(response, log)
|
||||
}
|
||||
|
||||
func (p *sparkProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name ApiName, chunk []byte, isLastChunk bool, log wrapper.Log) ([]byte, error) {
|
||||
if isLastChunk || len(chunk) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
responseBuilder := &strings.Builder{}
|
||||
lines := strings.Split(string(chunk), "\n")
|
||||
for _, data := range lines {
|
||||
if len(data) < 6 {
|
||||
// ignore blank line or wrong format
|
||||
continue
|
||||
}
|
||||
data = data[6:]
|
||||
// The final response is `data: [DONE]`
|
||||
if data == "[DONE]" {
|
||||
continue
|
||||
}
|
||||
var sparkResponse sparkStreamResponse
|
||||
if err := json.Unmarshal([]byte(data), &sparkResponse); err != nil {
|
||||
log.Errorf("unable to unmarshal spark response: %v", err)
|
||||
continue
|
||||
}
|
||||
response := p.streamResponseSpark2OpenAI(ctx, &sparkResponse)
|
||||
responseBody, err := json.Marshal(response)
|
||||
if err != nil {
|
||||
log.Errorf("unable to marshal response: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
p.appendResponse(responseBuilder, string(responseBody))
|
||||
}
|
||||
modifiedResponseChunk := responseBuilder.String()
|
||||
log.Debugf("=== modified response chunk: %s", modifiedResponseChunk)
|
||||
return []byte(modifiedResponseChunk), nil
|
||||
}
|
||||
|
||||
func (p *sparkProvider) responseSpark2OpenAI(ctx wrapper.HttpContext, response *sparkResponse) *chatCompletionResponse {
|
||||
choices := make([]chatCompletionChoice, len(response.Choices))
|
||||
for idx, c := range response.Choices {
|
||||
choices[idx] = chatCompletionChoice{
|
||||
Index: c.Index,
|
||||
Message: &chatMessage{Role: c.Message.Role, Content: c.Message.Content},
|
||||
}
|
||||
}
|
||||
return &chatCompletionResponse{
|
||||
Id: response.Sid,
|
||||
Created: time.Now().UnixMilli() / 1000,
|
||||
Object: objectChatCompletion,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
Choices: choices,
|
||||
Usage: response.Usage,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *sparkProvider) streamResponseSpark2OpenAI(ctx wrapper.HttpContext, response *sparkStreamResponse) *chatCompletionResponse {
|
||||
choices := make([]chatCompletionChoice, len(response.Choices))
|
||||
for idx, c := range response.Choices {
|
||||
choices[idx] = chatCompletionChoice{
|
||||
Index: c.Index,
|
||||
Delta: &chatMessage{Role: c.Delta.Role, Content: c.Delta.Content},
|
||||
}
|
||||
}
|
||||
return &chatCompletionResponse{
|
||||
Id: response.Sid,
|
||||
Created: response.Created,
|
||||
Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""),
|
||||
Object: objectChatCompletion,
|
||||
Choices: choices,
|
||||
Usage: response.Usage,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *sparkProvider) appendResponse(responseBuilder *strings.Builder, responseBody string) {
|
||||
responseBuilder.WriteString(fmt.Sprintf("%s %s\n\n", streamDataItemKey, responseBody))
|
||||
}
|
||||
83
plugins/wasm-go/extensions/ai-proxy/provider/stepfun.go
Normal file
83
plugins/wasm-go/extensions/ai-proxy/provider/stepfun.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package provider
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util"
|
||||
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
|
||||
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
|
||||
)
|
||||
|
||||
const (
	// stepfunDomain is the upstream host of the StepFun API.
	stepfunDomain = "api.stepfun.com"
	// stepfunChatCompletionPath is the chat-completion endpoint path.
	stepfunChatCompletionPath = "/v1/chat/completions"
)

// stepfunProviderInitializer validates config and builds stepfunProvider instances.
type stepfunProviderInitializer struct {
}
|
||||
|
||||
func (m *stepfunProviderInitializer) ValidateConfig(config ProviderConfig) error {
|
||||
if config.apiTokens == nil || len(config.apiTokens) == 0 {
|
||||
return errors.New("no apiToken found in provider config")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *stepfunProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) {
|
||||
return &stepfunProvider{
|
||||
config: config,
|
||||
contextCache: createContextCache(&config),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// stepfunProvider is the provider implementation for the StepFun AI service.
type stepfunProvider struct {
	config       ProviderConfig
	contextCache *contextCache
}
|
||||
|
||||
// GetProviderType returns the registered provider-type identifier for StepFun.
func (m *stepfunProvider) GetProviderType() string {
	return providerTypeStepfun
}
|
||||
|
||||
func (m *stepfunProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
_ = util.OverwriteRequestPath(stepfunChatCompletionPath)
|
||||
_ = util.OverwriteRequestHost(stepfunDomain)
|
||||
_ = util.OverwriteRequestAuthorization("Bearer " + m.config.GetRandomToken())
|
||||
_ = proxywasm.RemoveHttpRequestHeader("Content-Length")
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
|
||||
func (m *stepfunProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) {
|
||||
if apiName != ApiNameChatCompletion {
|
||||
return types.ActionContinue, errUnsupportedApiName
|
||||
}
|
||||
if m.contextCache == nil {
|
||||
return types.ActionContinue, nil
|
||||
}
|
||||
request := &chatCompletionRequest{}
|
||||
if err := decodeChatCompletionRequest(body, request); err != nil {
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
err := m.contextCache.GetContent(func(content string, err error) {
|
||||
defer func() {
|
||||
_ = proxywasm.ResumeHttpRequest()
|
||||
}()
|
||||
if err != nil {
|
||||
log.Errorf("failed to load context file: %v", err)
|
||||
_ = util.SendResponse(500, "ai-proxy.stepfun.load_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to load context file: %v", err))
|
||||
}
|
||||
insertContextMessage(request, content)
|
||||
if err := replaceJsonRequestBody(request, log); err != nil {
|
||||
_ = util.SendResponse(500, "ai-proxy.stepfun.insert_ctx_failed", util.MimeTypeTextPlain, fmt.Sprintf("failed to replace request body: %v", err))
|
||||
}
|
||||
}, log)
|
||||
if err == nil {
|
||||
return types.ActionPause, nil
|
||||
}
|
||||
return types.ActionContinue, err
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user