mirror of
https://github.com/alibaba/higress.git
synced 2026-02-06 23:21:08 +08:00
add value length limit for ai statistics, truncate when over limit (#2729)
This commit is contained in:
@@ -24,6 +24,7 @@ description: AI可观测配置参考
|
||||
|----------------|-------|------|-----|------------------------|
|
||||
| `attributes` | []Attribute | 非必填 | - | 用户希望记录在log/span中的信息 |
|
||||
| `disable_openai_usage` | bool | 非必填 | false | 非openai兼容协议时,model、token的支持非标,配置为true时可以避免报错 |
|
||||
| `value_length_limit` | int | 非必填 | 4000 | 记录的单个value的长度限制 |
|
||||
|
||||
Attribute 配置说明:
|
||||
|
||||
|
||||
@@ -24,6 +24,8 @@ Users can also expand observable values through configuration:
|
||||
|----------------|-------|------|-----|------------------------|
|
||||
| `attributes` | []Attribute | optional | - | Information that the user wants to record in log/span |
|
||||
| `disable_openai_usage` | bool | optional | false | When using a non-OpenAI-compatible protocol, the support for model and token is non-standard. Setting the configuration to true can prevent errors. |
|
||||
| `value_length_limit` | int | optional | 4000 | Length limit for each recorded value; values exceeding the limit are truncated |
|
||||
|
||||
|
||||
Attribute configuration instructions:
|
||||
|
||||
|
||||
@@ -99,6 +99,7 @@ type AIStatisticsConfig struct {
|
||||
shouldBufferStreamingBody bool
|
||||
// If disableOpenaiUsage is true, model/input_token/output_token logs will be skipped
|
||||
disableOpenaiUsage bool
|
||||
valueLengthLimit int
|
||||
}
|
||||
|
||||
func generateMetricName(route, cluster, model, consumer, metricName string) string {
|
||||
@@ -149,6 +150,11 @@ func (config *AIStatisticsConfig) incrementCounter(metricName string, inc uint64
|
||||
func parseConfig(configJson gjson.Result, config *AIStatisticsConfig) error {
|
||||
// Parse tracing span attributes setting.
|
||||
attributeConfigs := configJson.Get("attributes").Array()
|
||||
if configJson.Get("value_length_limit").Exists() {
|
||||
config.valueLengthLimit = int(configJson.Get("value_length_limit").Int())
|
||||
} else {
|
||||
config.valueLengthLimit = 4000
|
||||
}
|
||||
config.attributes = make([]Attribute, len(attributeConfigs))
|
||||
for i, attributeConfig := range attributeConfigs {
|
||||
attribute := Attribute{}
|
||||
@@ -195,12 +201,12 @@ func onHttpRequestHeaders(ctx wrapper.HttpContext, config AIStatisticsConfig) ty
|
||||
|
||||
ctx.SetRequestBodyBufferLimit(defaultMaxBodyBytes)
|
||||
|
||||
// Set span attributes for ARMS.
|
||||
setSpanAttribute(ArmsSpanKind, "LLM")
|
||||
// Set user defined log & span attributes which type is fixed_value
|
||||
setAttributeBySource(ctx, config, FixedValue, nil)
|
||||
// Set user defined log & span attributes which type is request_header
|
||||
setAttributeBySource(ctx, config, RequestHeader, nil)
|
||||
// Set span attributes for ARMS.
|
||||
setSpanAttribute(ArmsSpanKind, "LLM")
|
||||
|
||||
return types.ActionContinue
|
||||
}
|
||||
@@ -393,6 +399,9 @@ func setAttributeBySource(ctx wrapper.HttpContext, config AIStatisticsConfig, so
|
||||
if (value == nil || value == "") && attribute.DefaultValue != "" {
|
||||
value = attribute.DefaultValue
|
||||
}
|
||||
if len(fmt.Sprint(value)) > config.valueLengthLimit {
|
||||
value = fmt.Sprint(value)[:config.valueLengthLimit/2] + " [truncated] " + fmt.Sprint(value)[len(fmt.Sprint(value))-config.valueLengthLimit/2:]
|
||||
}
|
||||
log.Debugf("[attribute] source type: %s, key: %s, value: %+v", source, key, value)
|
||||
if attribute.ApplyToLog {
|
||||
if attribute.AsSeparateLogField {
|
||||
|
||||
Reference in New Issue
Block a user