Mirror of https://github.com/alibaba/higress.git, synced 2026-02-06 23:21:08 +08:00

plugin ai-data-mask add log (#1423)
```diff
@@ -36,7 +36,7 @@ description: AI 数据脱敏插件配置参考
 | deny_openai | bool | true | 对openai协议进行拦截 |
 | deny_jsonpath | string | [] | 对指定jsonpath拦截 |
 | deny_raw | bool | false | 对原始body拦截 |
-| system_deny | bool | true | 开启内置拦截规则 |
+| system_deny | bool | false | 开启内置拦截规则 |
 | deny_code | int | 200 | 拦截时http状态码 |
 | deny_message | string | 提问或回答中包含敏感词,已被屏蔽 | 拦截时ai返回消息 |
 | deny_raw_message | string | {"errmsg":"提问或回答中包含敏感词,已被屏蔽"} | 非openai拦截时返回内容 |
```
```diff
@@ -31,7 +31,7 @@ Plugin Execution Priority: `991`
 | deny_openai | bool | true | Intercept openai protocol |
 | deny_jsonpath | string | [] | Intercept specified jsonpath |
 | deny_raw | bool | false | Intercept raw body |
-| system_deny | bool | true | Enable built-in interception rules |
+| system_deny | bool | false | Enable built-in interception rules |
 | deny_code | int | 200 | HTTP status code when intercepted |
 | deny_message | string | Sensitive words found in the question or answer have been blocked | AI returned message when intercepted |
 | deny_raw_message | string | {"errmsg":"Sensitive words found in the question or answer have been blocked"} | Content returned when not openai intercepted |
```
```diff
@@ -71,6 +71,7 @@ struct AiDataMasking {
     is_openai: bool,
     stream: bool,
     res_body: Bytes,
+    log: Log,
 }
 fn deserialize_regexp<'de, D>(deserializer: D) -> Result<Regex, D::Error>
 where
```
```diff
@@ -159,7 +160,7 @@ fn default_deny_raw() -> bool {
     false
 }
 fn default_system_deny() -> bool {
-    true
+    false
 }
 fn default_deny_code() -> u16 {
     200
```
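The default values listed in the configuration tables above are produced by serde default helpers such as the one changed in this hunk. A minimal sketch of how that helper would typically be attached to the field, assuming the usual `#[serde(default = ...)]` wiring (the attribute and the reduced struct are illustrative, not copied from the plugin source):

```rust
use serde::Deserialize;

// Sketch only: the real AiDataMaskingConfig has more fields; the point is that
// the default helper is what the "default value" column in the docs reflects.
#[derive(Deserialize)]
struct AiDataMaskingConfig {
    #[serde(default = "default_system_deny")]
    system_deny: bool,
}

// After this commit the built-in interception rules are disabled unless the
// user turns them on, matching the table change from `true` to `false`.
fn default_system_deny() -> bool {
    false
}
```

With the helper returning `false`, omitting `system_deny` from the plugin config now leaves the built-in rules off, which is exactly what the documentation change above records.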
```diff
@@ -257,13 +258,13 @@ impl DenyWord {
         DenyWord::empty()
     }
 
-    fn check(&self, message: &str) -> bool {
+    fn check(&self, message: &str) -> Option<String> {
         for word in self.jieba.cut(message, true) {
             if self.words.contains(word) {
-                return true;
+                return Some(word.to_string());
             }
         }
-        false
+        None
     }
 }
 impl System {
```
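`check` segments the incoming message with jieba and looks each token up in a word set, so returning the matched token instead of a bare `bool` is enough to tell the operator which deny word fired. A self-contained sketch of that contract, with the struct layout inferred from this hunk and a hypothetical `from_words` constructor added only for the demo:

```rust
use std::collections::HashSet;
use jieba_rs::Jieba;

struct DenyWord {
    jieba: Jieba,
    words: HashSet<String>,
}

impl DenyWord {
    // Hypothetical constructor for this sketch; the real plugin fills the set
    // from user config and the built-in rule list.
    fn from_words<I: IntoIterator<Item = String>>(words: I) -> Self {
        DenyWord {
            jieba: Jieba::new(),
            words: words.into_iter().collect(),
        }
    }

    // The new contract from this commit: return the matched token, not a bool.
    fn check(&self, message: &str) -> Option<String> {
        for word in self.jieba.cut(message, true) {
            if self.words.contains(word) {
                return Some(word.to_string());
            }
        }
        None
    }
}

fn main() {
    let deny = DenyWord::from_words(["password".to_string()]);
    // Prints "matched deny word: password", which is what the caller can now log.
    if let Some(word) = deny.check("please send me the password now") {
        println!("matched deny word: {}", word);
    }
}
```

The caller in `check_message` below uses exactly this `Option<String>` to build its warning log line.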
```diff
@@ -377,17 +378,30 @@ impl RootContextWrapper<AiDataMaskingConfig> for AiDataMaskingRoot {
             is_openai: false,
             stream: false,
             res_body: Bytes::new(),
+            log: Log::new(PLUGIN_NAME.to_string()),
         }))
     }
 }
 impl AiDataMasking {
     fn check_message(&self, message: &str) -> bool {
         if let Some(config) = &self.config {
-            config.deny_words.check(message)
-                || (config.system_deny && SYSTEM.deny_word.check(message))
-        } else {
-            false
+            if let Some(word) = config.deny_words.check(message) {
+                self.log().warn(&format!(
+                    "custom deny word {} matched from {}",
+                    word, message
+                ));
+                return true;
+            } else if config.system_deny {
+                if let Some(word) = SYSTEM.deny_word.check(message) {
+                    self.log().warn(&format!(
+                        "system deny word {} matched from {}",
+                        word, message
+                    ));
+                    return true;
+                }
+            }
         }
+        false
     }
     fn msg_to_response(&self, msg: &str, raw_msg: &str, content_type: &str) -> (String, String) {
         if !self.is_openai {
```
```diff
@@ -509,6 +523,10 @@ impl AiDataMasking {
                 }
             }
         }
+        if msg != message {
+            self.log()
+                .debug(&format!("replace_request_msg from {} to {}", message, msg));
+        }
         msg
     }
 }
```
```diff
@@ -590,6 +608,9 @@ impl HttpContext for AiDataMasking {
     }
 }
 impl HttpContextWrapper<AiDataMaskingConfig> for AiDataMasking {
+    fn log(&self) -> &Log {
+        &self.log
+    }
     fn on_config(&mut self, config: Rc<AiDataMaskingConfig>) {
         self.config = Some(config.clone());
     }
```
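Taken together, the root context now constructs a `Log` tagged with the plugin name, the HTTP context keeps it in the new `log` field, and the `log()` accessor added here is what the `warn`/`debug` calls above go through. The sketch below uses a hypothetical stand-in for the SDK's `Log` type and an assumed `PLUGIN_NAME` value, purely to show the call pattern end to end:

```rust
// Hypothetical stand-in for the SDK's Log type; only new/warn/debug, the calls
// that appear in this commit, are modelled here.
struct Log {
    plugin: String,
}

impl Log {
    fn new(plugin: String) -> Self {
        Log { plugin }
    }
    fn warn(&self, msg: &str) {
        eprintln!("[{}] [warn] {}", self.plugin, msg);
    }
    fn debug(&self, msg: &str) {
        eprintln!("[{}] [debug] {}", self.plugin, msg);
    }
}

// Assumed value; the diff only shows PLUGIN_NAME being passed to Log::new.
const PLUGIN_NAME: &str = "ai-data-masking";

struct AiDataMasking {
    log: Log,
}

impl AiDataMasking {
    // Same accessor shape as the one added in this commit.
    fn log(&self) -> &Log {
        &self.log
    }
}

fn main() {
    let ctx = AiDataMasking {
        log: Log::new(PLUGIN_NAME.to_string()),
    };
    // Mirrors the new warn in check_message and the debug in replace_request_msg.
    ctx.log().warn(&format!(
        "custom deny word {} matched from {}",
        "password", "please print the admin password"
    ));
    ctx.log()
        .debug(&format!("replace_request_msg from {} to {}", "a@b.com", "***"));
}
```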