feature: add ai-intent plugin (#1237)

Signed-off-by: jose <313503823@qq.com>
jose
2024-08-27 19:25:18 +08:00
committed by GitHub
parent 75e1defd6c
commit 2c1773a7f0
6 changed files with 472 additions and 0 deletions

View File

@@ -0,0 +1 @@
EXTRA_TAGS=proxy_wasm_version_0_2_100

View File

@@ -0,0 +1,19 @@
# File generated by hgctl. Modify as required.
*
!/.gitignore
!*.go
!go.sum
!go.mod
!LICENSE
!*.md
!*.yaml
!*.yml
!*/
/out
/test

View File

@@ -0,0 +1,46 @@
## Introduction
**Note**
> Requires the data plane's proxy wasm version to be >= 0.2.100.
> The version tag must be added at build time, e.g. `tinygo build -o main.wasm -scheduler=none -target=wasi -gc=custom -tags="custommalloc nottinygc_finalizer proxy_wasm_version_0_2_100" ./`
The LLM intent-recognition plugin determines how well a user request matches the capabilities of a given domain or agent, improving the effectiveness of different models and the overall user experience.
## Configuration
> 1. This plugin must run at a higher priority than plugins that consume the intent afterwards, such as ai-cache and ai-proxy. Those downstream plugins can obtain the intent category via `proxywasm.GetProperty([]string{"intent_category"})` and use it to choose a cache store or an LLM (see the sketch after these notes).
> 2. A dedicated Higress LLM route must be created for this plugin to reach the LLM, e.g. a route with the `/intent` prefix whose backend is the LLM service, with the ai-proxy plugin enabled on that route.
> 3. A fixed-address service (e.g. intent-service) pointing to 127.0.0.1:80 (the gateway instance itself plus its port) must be created; the ai-intent plugin calls this service internally to reach the route above, and its name corresponds to `llm.proxyServiceName`. Alternatively, a DNS-type service can be created so the plugin calls another LLM directly.
> 4. When a fixed-address service is used to call the gateway itself, 127.0.0.1 must be added to the gateway's access allowlist.
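For example, a downstream plugin could read the category written by ai-intent roughly as follows (a minimal sketch, not part of this commit; `readIntentCategory` is a hypothetical helper inside a downstream plugin's package):
```go
import "github.com/higress-group/proxy-wasm-go-sdk/proxywasm"

// readIntentCategory returns the category set by ai-intent, or "" when no
// category was set (e.g. the LLM call failed or returned 'NotFound').
func readIntentCategory() string {
	raw, err := proxywasm.GetProperty([]string{"intent_category"})
	if err != nil || len(raw) == 0 {
		return ""
	}
	return string(raw)
}
```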
| Name | Data type | Required | Default | Description |
| -------------- | --------------- | -------- | ------ | ------------------------------------------------------------ |
| `scene.category` | string | Required | - | Preset scene categories, separated by "\|", e.g. "金融\|电商\|法律\|Higress" |
| `scene.prompt` | string | Optional | 你是一个智能类别识别助手,负责根据用户提出的问题和预设的类别,确定问题属于哪个预设的类别,并给出相应的类别。用户提出的问题为:'%s',预设的类别为'%s',直接返回一种具体类别,如果没有找到就返回'NotFound'。 | Prompt template for the LLM request; the first `%s` is replaced with the user's question and the second with `scene.category` (see the sketch below this table) |
| `llm.proxyServiceName` | string | Required | - | The newly created Higress service pointing to the LLM (use the FQDN value from Higress) |
| `llm.proxyUrl` | string | Required | - | Full URL of the LLM route (OpenAI-compatible protocol); can be the gateway's own address or another LLM's address, e.g. http://127.0.0.1:80/intent/compatible-mode/v1/chat/completions |
| `llm.proxyDomain` | string | Optional | parsed from proxyUrl | Domain of the LLM service |
| `llm.proxyPort` | string | Optional | parsed from proxyUrl | Port of the LLM service |
| `llm.proxyApiKey` | string | Optional | - | API_KEY of the corresponding LLM; required when an external LLM service is used |
| `llm.proxyModel` | string | Optional | qwen-long | LLM model name |
| `llm.proxyTimeout` | number | Optional | 10000 | Timeout for the LLM call, in ms (default 10000 ms) |
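The two `%s` placeholders in `scene.prompt` are filled, in order, with the user's question and the configured `scene.category` string via `fmt.Sprintf`. A minimal sketch with illustrative values (the template is shortened here):
```go
package main

import "fmt"

func main() {
	question := "今天天气怎么样?"          // latest user message extracted from the request body
	categories := "金融|电商|法律|Higress" // scene.category as configured
	// shortened template: the real default also instructs the model to return 'NotFound' when nothing matches
	prompt := fmt.Sprintf("用户提出的问题为:'%s',预设的类别为'%s'", question, categories)
	fmt.Println(prompt)
}
```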
## Configuration example
```yaml
scene:
category: "金融|电商|法律|Higress"
prompt: "你是一个智能类别识别助手,负责根据用户提出的问题和预设的类别,确定问题属于哪个预设的类别,并给出相应的类别。用户提出的问题为:'%s',预设的类别为'%s',直接返回一种具体类别,如果没有找到就返回'NotFound'。"
llm:
proxyServiceName: "intent-service.static"
proxyUrl: "http://127.0.0.1:80/intent/compatible-mode/v1/chat/completions"
proxyDomain: "127.0.0.1"
proxyPort: "80"
proxyModel: "qwen-long"
proxyApiKey: ""
proxyTimeout: "10000"
```

View File

@@ -0,0 +1,23 @@
// File generated by hgctl. Modify as required.
module github.com/alibaba/higress/plugins/wasm-go/extensions/ai-intent
go 1.19
replace github.com/alibaba/higress/plugins/wasm-go => ../..
require (
github.com/alibaba/higress/plugins/wasm-go v1.4.2
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f
github.com/tidwall/gjson v1.17.1
)
require (
github.com/google/uuid v1.6.0 // indirect
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520 // indirect
github.com/magefile/mage v1.15.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/resp v0.1.1 // indirect
)

View File

@@ -0,0 +1,26 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520 h1:IHDghbGQ2DTIXHBHxWfqCYQW1fKjyJ/I7W1pMyUDeEA=
github.com/higress-group/nottinygc v0.0.0-20231101025119-e93c4c2f8520/go.mod h1:Nz8ORLaFiLWotg6GeKlJMhv8cci8mM43uEnLA5t8iew=
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f h1:ZIiIBRvIw62gA5MJhuwp1+2wWbqL9IGElQ499rUsYYg=
github.com/higress-group/proxy-wasm-go-sdk v0.0.0-20240711023527-ba358c48772f/go.mod h1:hNFjhrLUIq+kJ9bOcs8QtiplSQ61GZXtd2xHKx4BYRo=
github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/tetratelabs/wazero v1.6.0/go.mod h1:0U0G41+ochRKoPKCJlh0jMg1CHkyfK8kDqiirMmKY8A=
github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U=
github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/resp v0.1.1 h1:Ly20wkhqKTmDUPlyM1S7pWo5kk0tDu8OoC/vFArXmwE=
github.com/tidwall/resp v0.1.1/go.mod h1:3/FrruOBAxPTPtundW0VXgmsQ4ZBA0Aw714lVYgwFa0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -0,0 +1,357 @@
// File generated by hgctl. Modify as required.
// See:
package main
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper"
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm"
"github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types"
"github.com/tidwall/gjson"
)
const (
DefaultPrompt = "你是一个智能类别识别助手,负责根据用户提出的问题和预设的类别,确定问题属于哪个预设的类别,并给出相应的类别。用户提出的问题为:'%s',预设的类别为'%s',直接返回一种具体类别,如果没有找到就返回'NotFound'。"
defaultTimeout = 10 * 1000 // ms
)
func main() {
wrapper.SetCtx(
"ai-intent",
wrapper.ParseConfigBy(parseConfig),
wrapper.ProcessRequestHeadersBy(onHttpRequestHeaders),
wrapper.ProcessRequestBodyBy(onHttpRequestBody),
wrapper.ProcessResponseHeadersBy(onHttpResponseHeaders),
wrapper.ProcessStreamingResponseBodyBy(onStreamingResponseBody),
wrapper.ProcessResponseBodyBy(onHttpResponseBody),
)
}
// @Name ai-intent
// @Category protocol
// @Phase AUTHN
// @Priority 1000
// @Title zh-CN AI intent
// @Description zh-CN 大模型意图识别
// @IconUrl
// @Version 0.1.0
//
// @Contact.name jose
// @Contact.url
// @Contact.email
// @Example
// scene:
// category: "金融|电商|法律|Higress"
// prompt: "你是一个智能类别识别助手,负责根据用户提出的问题和预设的类别,确定问题属于哪个预设的类别,并给出相应的类别。用户提出的问题为:'%s',预设的类别为'%s',直接返回一种具体类别,如果没有找到就返回'NotFound'。"
// e.g.: "你是一个智能类别识别助手,负责根据用户提出的问题和预设的类别,确定问题属于哪个预设的类别,并给出相应的类别。用户提出的问题为:今天天气怎么样?,预设的类别为 ["金融","电商","法律"],直接返回一种具体类别,如果没有找到就返回"NotFound"。"
type SceneInfo struct {
Category string `required:"true" yaml:"category" json:"category"`
Prompt string `required:"false" yaml:"prompt" json:"prompt"`
// array parsed from Category by splitting on '|'
CategoryArr []string `yaml:"-" json:"-"`
}
type LLMInfo struct {
ProxyServiceName string `required:"true" yaml:"proxyServiceName" json:"proxyServiceName"`
ProxyUrl string `required:"true" yaml:"proxyUrl" json:"proxyUrl"`
ProxyModel string `required:"false" yaml:"proxyModel" json:"proxyModel"`
// @Title zh-CN 大模型服务端口
// @Description zh-CN 服务端口
ProxyPort int64 `required:"false" yaml:"proxyPort" json:"proxyPort"`
// @Title zh-CN 大模型服务域名
// @Description zh-CN 大模型服务域名
ProxyDomain string `required:"false" yaml:"proxyDomain" json:"proxyDomain"`
ProxyTimeout uint32 `required:"false" yaml:"proxyTimeout" json:"proxyTimeout"`
// @Title zh-CN 大模型服务的API_KEY
// @Description zh-CN 大模型服务的API_KEY
ProxyApiKey string `required:"false" yaml:"proxyApiKey" json:"proxyApiKey"`
ProxyClient wrapper.HttpClient `yaml:"-" json:"-"`
// @Title zh-CN 大模型接口路径
// @Description zh-CN 大模型接口路径
ProxyPath string `yaml:"-" json:"-"`
}
type PluginConfig struct {
// @Title zh-CN 意图相关配置
// @Description zh-CN SceneInfo
SceneInfo SceneInfo `required:"true" yaml:"scene" json:"scene"`
// @Title zh-CN 大模型相关配置
// @Description zh-CN LLMInfo
LLMInfo LLMInfo `required:"true" yaml:"llm" json:"llm"`
// @Title zh-CN key 的来源
// @Description zh-CN 使用的 key 的提取方式
KeyFrom KVExtractor `required:"true" yaml:"keyFrom" json:"keyFrom"`
}
type KVExtractor struct {
// @Title zh-CN 从请求 Body 中基于 [GJSON PATH](https://github.com/tidwall/gjson/blob/master/SYNTAX.md) 语法提取字符串
RequestBody string `required:"false" yaml:"requestBody" json:"requestBody"`
// @Title zh-CN 从响应 Body 中基于 [GJSON PATH](https://github.com/tidwall/gjson/blob/master/SYNTAX.md) 语法提取字符串
ResponseBody string `required:"false" yaml:"responseBody" json:"responseBody"`
}
func parseConfig(json gjson.Result, c *PluginConfig, log wrapper.Log) error {
log.Infof("config:%s", json.Raw)
// init scene
c.SceneInfo.Category = json.Get("scene.category").String()
log.Infof("SceneInfo.Category:%s", c.SceneInfo.Category)
if c.SceneInfo.Category == "" {
return errors.New("scene.category must not by empty")
}
c.SceneInfo.CategoryArr = strings.Split(c.SceneInfo.Category, "|")
if len(c.SceneInfo.CategoryArr) <= 0 {
return errors.New("scene.category resolve exception, should use '|' split")
}
c.SceneInfo.Prompt = json.Get("scene.prompt").String()
if c.SceneInfo.Prompt == "" {
c.SceneInfo.Prompt = DefaultPrompt
}
log.Infof("SceneInfo.Prompt:%s", c.SceneInfo.Prompt)
// init llmProxy
log.Debug("Start to init proxyService's http client.")
c.LLMInfo.ProxyServiceName = json.Get("llm.proxyServiceName").String()
log.Infof("ProxyServiceName: %s", c.LLMInfo.ProxyServiceName)
if c.LLMInfo.ProxyServiceName == "" {
return errors.New("llm.proxyServiceName must not by empty")
}
c.LLMInfo.ProxyUrl = json.Get("llm.proxyUrl").String()
log.Infof("c.LLMInfo.ProxyUrl:%s", c.LLMInfo.ProxyUrl)
if c.LLMInfo.ProxyUrl == "" {
return errors.New("llm.proxyUrl must not by empty")
}
// parse the domain and path from proxyUrl
parsedURL, err := url.Parse(c.LLMInfo.ProxyUrl)
if err != nil {
return errors.New("llm.proxyUrl parsing error")
}
c.LLMInfo.ProxyPath = parsedURL.Path
log.Infof("c.LLMInfo.ProxyPath:%s", c.LLMInfo.ProxyPath)
c.LLMInfo.ProxyDomain = json.Get("llm.proxyDomain").String()
// when llm.proxyDomain is not configured, derive it from proxyUrl
if c.LLMInfo.ProxyDomain == "" {
hostName := parsedURL.Hostname()
log.Infof("llm.proxyUrl.hostName:%s", hostName)
if hostName != "" {
c.LLMInfo.ProxyDomain = hostName
}
}
log.Infof("c.LLMInfo.ProxyDomain:%s", c.LLMInfo.ProxyDomain)
c.LLMInfo.ProxyPort = json.Get("llm.proxyPort").Int()
// when llm.proxyPort is not configured, derive it from proxyUrl; if the URL has no port, default to 80 for http and 443 for https
if c.LLMInfo.ProxyPort <= 0 {
port := parsedURL.Port()
log.Infof("llm.proxyUrl.port:%s", port)
if port == "" {
c.LLMInfo.ProxyPort = 80
if parsedURL.Scheme == "https" {
c.LLMInfo.ProxyPort = 443
}
} else {
portNum, err := strconv.ParseInt(port, 10, 64)
if err != nil {
return errors.New("llm.proxyUrl.port parsing error")
}
c.LLMInfo.ProxyPort = portNum
}
}
log.Infof("c.LLMInfo.ProxyPort:%s", c.LLMInfo.ProxyPort)
c.LLMInfo.ProxyClient = wrapper.NewClusterClient(wrapper.FQDNCluster{
FQDN: c.LLMInfo.ProxyServiceName,
Port: c.LLMInfo.ProxyPort,
Host: c.LLMInfo.ProxyDomain,
})
c.LLMInfo.ProxyModel = json.Get("llm.proxyModel").String()
log.Infof("c.LLMInfo.ProxyModel:%s", c.LLMInfo.ProxyModel)
if c.LLMInfo.ProxyModel == "" {
c.LLMInfo.ProxyModel = "qwen-long"
}
c.LLMInfo.ProxyTimeout = uint32(json.Get("llm.proxyTimeout").Uint())
log.Infof("c.LLMInfo.ProxyTimeout:%s", c.LLMInfo.ProxyTimeout)
if c.LLMInfo.ProxyTimeout <= 0 {
c.LLMInfo.ProxyTimeout = defaultTimeout
}
c.LLMInfo.ProxyApiKey = json.Get("llm.proxyApiKey").String()
log.Infof("c.LLMInfo.ProxyApiKey:%s", c.LLMInfo.ProxyApiKey)
c.KeyFrom.RequestBody = json.Get("keyFrom.requestBody").String()
if c.KeyFrom.RequestBody == "" {
c.KeyFrom.RequestBody = "messages.@reverse.0.content"
}
c.KeyFrom.ResponseBody = json.Get("keyFrom.responseBody").String()
if c.KeyFrom.ResponseBody == "" {
c.KeyFrom.ResponseBody = "choices.0.message.content"
}
log.Debug("Init ai intent's components successfully.")
return nil
}
func onHttpRequestHeaders(ctx wrapper.HttpContext, config PluginConfig, log wrapper.Log) types.Action {
log.Debug("start onHttpRequestHeaders function.")
log.Debug("end onHttpRequestHeaders function.")
return types.HeaderStopIteration
}
func onHttpRequestBody(ctx wrapper.HttpContext, config PluginConfig, body []byte, log wrapper.Log) types.Action {
log.Debug("start onHttpRequestBody function.")
bodyJson := gjson.ParseBytes(body)
rawKey := strings.Trim(bodyJson.Get(config.KeyFrom.RequestBody).Raw, `"`)
// original user question
originalQuestion, _ := zhToUnicode([]byte(rawKey))
log.Infof("[onHttpRequestBody] originalQuestion is: %s", string(originalQuestion))
// fill the prompt template: substitute the question and the preset categories into the placeholders
promptStr := fmt.Sprintf(config.SceneInfo.Prompt, string(originalQuestion), config.SceneInfo.Category)
log.Infof("[onHttpRequestBody] after prompt is: %s", promptStr)
proxyUrl, proxyRequestBody, proxyRequestHeader := generateProxyRequest(&config, []string{promptStr}, log)
log.Infof("[onHttpRequestBody] proxyUrl is: %s", proxyUrl)
log.Infof("[onHttpRequestBody] proxyRequestBody is: %s", string(proxyRequestBody))
// call the LLM to obtain the intent category
llmProxyErr := config.LLMInfo.ProxyClient.Post(
proxyUrl,
proxyRequestHeader,
proxyRequestBody,
func(statusCode int, responseHeaders http.Header, responseBody []byte) {
log.Debug("Start llm.llmProxyClient func")
log.Infof("llm.llmProxyClient statusCode is:%s", statusCode)
log.Infof("llm.llmProxyClient intent responseBody is: %s", string(responseBody))
if statusCode == 200 {
proxyResponseBody, _ := proxyResponseHandler(responseBody, log)
// intent category returned by the LLM
if proxyResponseBody != nil && len(proxyResponseBody.Choices) > 0 {
category := proxyResponseBody.Choices[0].Message.Content
log.Infof("llmProxyClient intent response category is: %s", category)
// check whether the returned category is one of the preset categories
for i := range config.SceneInfo.CategoryArr {
// skip blank or empty entries
if strings.TrimSpace(config.SceneInfo.CategoryArr[i]) == "" {
continue
}
// two match conditions: 1. the returned category equals the preset category exactly; 2. the returned category contains the preset category
if config.SceneInfo.CategoryArr[i] == category || strings.Contains(category, config.SceneInfo.CategoryArr[i]) {
// store the intent category in a property so downstream plugins can read it
log.Debug("llmProxyClient intent category set to Property")
proErr := proxywasm.SetProperty([]string{"intent_category"}, []byte(config.SceneInfo.CategoryArr[i]))
if proErr != nil {
log.Errorf("llmProxyClient proxywasm SetProperty error: %s", proErr.Error())
}
break
}
}
}
}
_ = proxywasm.ResumeHttpRequest()
return
}, config.LLMInfo.ProxyTimeout)
if llmProxyErr != nil {
log.Errorf("llmProxy intent error: %s", llmProxyErr.Error())
_ = proxywasm.ResumeHttpRequest()
}
log.Debug("end onHttpRequestHeaders function.")
return types.ActionPause
}
func onHttpResponseHeaders(ctx wrapper.HttpContext, config PluginConfig, log wrapper.Log) types.Action {
log.Debug("start onHttpResponseHeaders function.")
log.Debug("end onHttpResponseHeaders function.")
return types.ActionContinue
}
func onStreamingResponseBody(ctx wrapper.HttpContext, config PluginConfig, chunk []byte, isLastChunk bool, log wrapper.Log) []byte {
log.Debug("start onStreamingResponseBody function.")
log.Debug("end onStreamingResponseBody function.")
return chunk
}
func onHttpResponseBody(ctx wrapper.HttpContext, config PluginConfig, body []byte, log wrapper.Log) types.Action {
log.Debug("start onHttpResponseBody function.")
log.Debug("end onHttpResponseBody function.")
return types.ActionContinue
}
type ProxyRequest struct {
Model string `json:"model"`
Messages []ProxyRequestMessage `json:"messages"`
}
type ProxyRequestMessage struct {
Role string `json:"role"`
Content string `json:"content"`
}
func generateProxyRequest(c *PluginConfig, texts []string, log wrapper.Log) (string, []byte, [][2]string) {
proxyPath := c.LLMInfo.ProxyPath
var userMessage ProxyRequestMessage
userMessage.Role = "user"
userMessage.Content = texts[0]
var messages []ProxyRequestMessage
messages = append(messages, userMessage)
data := ProxyRequest{
Model: c.LLMInfo.ProxyModel,
Messages: messages,
}
requestBody, err := json.Marshal(data)
if err != nil {
log.Errorf("[generateProxyRequest] Marshal json error:%s, data:%s.", err, data)
return "", nil, nil
}
headers := [][2]string{
{"Content-Type", "application/json"},
{"Authorization", "Bearer " + c.LLMInfo.ProxyApiKey},
}
return proxyPath, requestBody, headers
}
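// zhToUnicode converts escaped \u sequences back into their original characters so the question and answer are logged and compared correctly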
func zhToUnicode(raw []byte) ([]byte, error) {
str, err := strconv.Unquote(strings.Replace(strconv.Quote(string(raw)), `\\u`, `\u`, -1))
if err != nil {
return nil, err
}
return []byte(str), nil
}
type ProxyResponse struct {
Status int `json:"code"`
Id string `json:"id"`
Choices []ProxyResponseOutputChoices `json:"choices"`
}
type ProxyResponseOutputChoices struct {
FinishReason string `json:"finish_reason"`
Message ProxyResponseOutputChoicesMessage `json:"message"`
}
type ProxyResponseOutputChoicesMessage struct {
Role string `json:"role"`
Content string `json:"content"`
}
func proxyResponseHandler(responseBody []byte, log wrapper.Log) (*ProxyResponse, error) {
var response ProxyResponse
err := json.Unmarshal(responseBody, &response)
if err != nil {
log.Errorf("[proxyResponseHandler]Unmarshal json error:%s", err)
return nil, err
}
return &response, nil
}
func getProxyResponseByExtractor(c *PluginConfig, responseBody []byte, log wrapper.Log) string {
bodyJson := gjson.ParseBytes(responseBody)
responseContent := strings.Trim(bodyJson.Get(c.KeyFrom.ResponseBody).Raw, `"`)
// result returned by the LLM
originalAnswer, _ := zhToUnicode([]byte(responseContent))
return string(originalAnswer)
}