mirror of
https://github.com/nagisa77/OpenIsle.git
synced 2026-02-09 00:21:13 +08:00
Compare commits
60 Commits
codex/crea
...
codex/fix-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7a2cf829c7 | ||
|
|
12329b43d1 | ||
|
|
1a45603e0f | ||
|
|
4a73503399 | ||
|
|
83bf8c1d5e | ||
|
|
34e206f05d | ||
|
|
dc349923e9 | ||
|
|
0d44c9a823 | ||
|
|
02645af321 | ||
|
|
c3a175f13f | ||
|
|
0821d447f7 | ||
|
|
257794ca00 | ||
|
|
6a527de3eb | ||
|
|
2313f90eb3 | ||
|
|
7fde984e7d | ||
|
|
fc41e605e4 | ||
|
|
042e5fdbe6 | ||
|
|
629442bff6 | ||
|
|
7798910be0 | ||
|
|
6f036eb4fe | ||
|
|
56fc05cb3c | ||
|
|
a55a15659b | ||
|
|
ccf6e0c7ce | ||
|
|
87677f5968 | ||
|
|
fd93a2dc61 | ||
|
|
80f862a226 | ||
|
|
26bb85f4d4 | ||
|
|
398b4b482f | ||
|
|
2cfb302981 | ||
|
|
e75bd76b71 | ||
|
|
99c3ac1837 | ||
|
|
749ab560ff | ||
|
|
541ad4d149 | ||
|
|
03eb027ea4 | ||
|
|
4194b2be91 | ||
|
|
9dadaad5ba | ||
|
|
d4b3400c5f | ||
|
|
e585100625 | ||
|
|
e94471b53e | ||
|
|
997dacdbe6 | ||
|
|
c01349a436 | ||
|
|
4cf48f9157 | ||
|
|
796afbe612 | ||
|
|
dca14390ca | ||
|
|
39875acd35 | ||
|
|
62edc75735 | ||
|
|
26ca9fc916 | ||
|
|
cad70c23b3 | ||
|
|
016276dbc3 | ||
|
|
bd2d6e7485 | ||
|
|
df59a9fd4b | ||
|
|
2e70a3d273 | ||
|
|
3dc6935d19 | ||
|
|
779bb2db78 | ||
|
|
b3b0b194a3 | ||
|
|
e21b2f42d2 | ||
|
|
05a5acee7e | ||
|
|
755982098b | ||
|
|
af24263c0a | ||
|
|
8fd268bd11 |
@@ -2,6 +2,7 @@
|
||||
SERVER_PORT=8080
|
||||
FRONTEND_PORT=3000
|
||||
WEBSOCKET_PORT=8082
|
||||
OPENISLE_MCP_PORT=8085
|
||||
MYSQL_PORT=3306
|
||||
REDIS_PORT=6379
|
||||
RABBITMQ_PORT=5672
|
||||
|
||||
29
.github/workflows/coffee-bot.yml
vendored
Normal file
29
.github/workflows/coffee-bot.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
name: Coffee Bot
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 1 * * *"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
run-coffee-bot:
|
||||
environment: Bots
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "npm"
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install --no-save @openai/agents tsx typescript
|
||||
|
||||
- name: Run coffee bot
|
||||
env:
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
OPENISLE_TOKEN: ${{ secrets.OPENISLE_TOKEN }}
|
||||
run: npx tsx bots/instance/coffee_bot.ts
|
||||
29
.github/workflows/reply-bots.yml
vendored
Normal file
29
.github/workflows/reply-bots.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
name: Reply Bots
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "*/30 * * * *"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
run-reply-bot:
|
||||
environment: Bots
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "npm"
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install --no-save @openai/agents tsx typescript
|
||||
|
||||
- name: Run reply bot
|
||||
env:
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
OPENISLE_TOKEN: ${{ secrets.OPENISLE_TOKEN }}
|
||||
run: npx tsx bots/instance/reply_bot.ts
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -17,7 +17,6 @@ dist
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
__pycache__/
|
||||
*.pem
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
package com.openisle.controller;
|
||||
|
||||
import com.openisle.dto.CommentContextDto;
|
||||
import com.openisle.dto.CommentDto;
|
||||
import com.openisle.dto.CommentRequest;
|
||||
import com.openisle.dto.PostChangeLogDto;
|
||||
import com.openisle.dto.TimelineItemDto;
|
||||
import com.openisle.mapper.CommentMapper;
|
||||
import com.openisle.mapper.PostChangeLogMapper;
|
||||
import com.openisle.mapper.PostMapper;
|
||||
import com.openisle.model.Comment;
|
||||
import com.openisle.model.CommentSort;
|
||||
import com.openisle.service.*;
|
||||
@@ -40,6 +42,7 @@ public class CommentController {
|
||||
private final PointService pointService;
|
||||
private final PostChangeLogService changeLogService;
|
||||
private final PostChangeLogMapper postChangeLogMapper;
|
||||
private final PostMapper postMapper;
|
||||
|
||||
@Value("${app.captcha.enabled:false}")
|
||||
private boolean captchaEnabled;
|
||||
@@ -184,6 +187,37 @@ public class CommentController {
|
||||
return itemDtoList;
|
||||
}
|
||||
|
||||
@GetMapping("/comments/{commentId}/context")
|
||||
@Operation(
|
||||
summary = "Comment context",
|
||||
description = "Get a comment along with its previous comments and related post"
|
||||
)
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "Comment context",
|
||||
content = @Content(schema = @Schema(implementation = CommentContextDto.class))
|
||||
)
|
||||
public ResponseEntity<CommentContextDto> getCommentContext(@PathVariable Long commentId) {
|
||||
log.debug("getCommentContext called for comment {}", commentId);
|
||||
Comment comment = commentService.getComment(commentId);
|
||||
CommentContextDto dto = new CommentContextDto();
|
||||
dto.setPost(postMapper.toSummaryDto(comment.getPost()));
|
||||
dto.setTargetComment(commentMapper.toDtoWithReplies(comment));
|
||||
dto.setPreviousComments(
|
||||
commentService
|
||||
.getCommentsBefore(comment)
|
||||
.stream()
|
||||
.map(commentMapper::toDtoWithReplies)
|
||||
.collect(Collectors.toList())
|
||||
);
|
||||
log.debug(
|
||||
"getCommentContext returning {} previous comments for comment {}",
|
||||
dto.getPreviousComments().size(),
|
||||
commentId
|
||||
);
|
||||
return ResponseEntity.ok(dto);
|
||||
}
|
||||
|
||||
@DeleteMapping("/comments/{id}")
|
||||
@Operation(summary = "Delete comment", description = "Delete a comment")
|
||||
@ApiResponse(responseCode = "200", description = "Deleted")
|
||||
|
||||
@@ -224,6 +224,26 @@ public class PostController {
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@GetMapping("/recent")
|
||||
@Operation(
|
||||
summary = "Recent posts",
|
||||
description = "List posts created within the specified number of minutes"
|
||||
)
|
||||
@ApiResponse(
|
||||
responseCode = "200",
|
||||
description = "Recent posts",
|
||||
content = @Content(
|
||||
array = @ArraySchema(schema = @Schema(implementation = PostSummaryDto.class))
|
||||
)
|
||||
)
|
||||
public List<PostSummaryDto> recentPosts(@RequestParam("minutes") int minutes) {
|
||||
return postService
|
||||
.listRecentPosts(minutes)
|
||||
.stream()
|
||||
.map(postMapper::toSummaryDto)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@GetMapping("/ranking")
|
||||
@Operation(summary = "Ranking posts", description = "List posts by view rankings")
|
||||
@ApiResponse(
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
package com.openisle.dto;
|
||||
|
||||
import java.util.List;
|
||||
import lombok.Data;
|
||||
|
||||
/**
|
||||
* DTO representing the context of a comment including its post and previous comments.
|
||||
*/
|
||||
@Data
|
||||
public class CommentContextDto {
|
||||
|
||||
private PostSummaryDto post;
|
||||
private CommentDto targetComment;
|
||||
private List<CommentDto> previousComments;
|
||||
}
|
||||
@@ -3,6 +3,7 @@ package com.openisle.repository;
|
||||
import com.openisle.model.Comment;
|
||||
import com.openisle.model.Post;
|
||||
import com.openisle.model.User;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
@@ -10,6 +11,10 @@ import org.springframework.data.jpa.repository.JpaRepository;
|
||||
public interface CommentRepository extends JpaRepository<Comment, Long> {
|
||||
List<Comment> findByPostAndParentIsNullOrderByCreatedAtAsc(Post post);
|
||||
List<Comment> findByParentOrderByCreatedAtAsc(Comment parent);
|
||||
List<Comment> findByPostAndCreatedAtLessThanOrderByCreatedAtAsc(
|
||||
Post post,
|
||||
LocalDateTime createdAt
|
||||
);
|
||||
List<Comment> findByAuthorOrderByCreatedAtDesc(User author, Pageable pageable);
|
||||
List<Comment> findByContentContainingIgnoreCase(String keyword);
|
||||
|
||||
|
||||
@@ -19,6 +19,10 @@ public interface PostRepository extends JpaRepository<Post, Long> {
|
||||
List<Post> findByStatusOrderByCreatedAtDesc(PostStatus status, Pageable pageable);
|
||||
List<Post> findByStatusOrderByViewsDesc(PostStatus status);
|
||||
List<Post> findByStatusOrderByViewsDesc(PostStatus status, Pageable pageable);
|
||||
List<Post> findByStatusAndCreatedAtGreaterThanEqualOrderByCreatedAtDesc(
|
||||
PostStatus status,
|
||||
LocalDateTime createdAt
|
||||
);
|
||||
List<Post> findByAuthorAndStatusOrderByCreatedAtDesc(
|
||||
User author,
|
||||
PostStatus status,
|
||||
|
||||
@@ -266,6 +266,27 @@ public class CommentService {
|
||||
return replies;
|
||||
}
|
||||
|
||||
public Comment getComment(Long commentId) {
|
||||
log.debug("getComment called for id {}", commentId);
|
||||
return commentRepository
|
||||
.findById(commentId)
|
||||
.orElseThrow(() -> new com.openisle.exception.NotFoundException("Comment not found"));
|
||||
}
|
||||
|
||||
public List<Comment> getCommentsBefore(Comment comment) {
|
||||
log.debug("getCommentsBefore called for comment {}", comment.getId());
|
||||
List<Comment> comments = commentRepository.findByPostAndCreatedAtLessThanOrderByCreatedAtAsc(
|
||||
comment.getPost(),
|
||||
comment.getCreatedAt()
|
||||
);
|
||||
log.debug(
|
||||
"getCommentsBefore returning {} comments for comment {}",
|
||||
comments.size(),
|
||||
comment.getId()
|
||||
);
|
||||
return comments;
|
||||
}
|
||||
|
||||
public List<Comment> getRecentCommentsByUser(String username, int limit) {
|
||||
log.debug("getRecentCommentsByUser called for user {} with limit {}", username, limit);
|
||||
User user = userRepository
|
||||
|
||||
@@ -770,6 +770,18 @@ public class PostService {
|
||||
return listPostsByCategories(null, null, null);
|
||||
}
|
||||
|
||||
public List<Post> listRecentPosts(int minutes) {
|
||||
if (minutes <= 0) {
|
||||
throw new IllegalArgumentException("Minutes must be positive");
|
||||
}
|
||||
LocalDateTime since = LocalDateTime.now().minusMinutes(minutes);
|
||||
List<Post> posts = postRepository.findByStatusAndCreatedAtGreaterThanEqualOrderByCreatedAtDesc(
|
||||
PostStatus.PUBLISHED,
|
||||
since
|
||||
);
|
||||
return sortByPinnedAndCreated(posts);
|
||||
}
|
||||
|
||||
public List<Post> listPostsByViews(Integer page, Integer pageSize) {
|
||||
return listPostsByViews(null, null, page, pageSize);
|
||||
}
|
||||
|
||||
150
bots/bot_father.ts
Normal file
150
bots/bot_father.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import { Agent, Runner, hostedMcpTool, withTrace } from "@openai/agents";
|
||||
|
||||
export type WorkflowInput = { input_as_text: string };
|
||||
|
||||
export abstract class BotFather {
|
||||
protected readonly allowedMcpTools = [
|
||||
"search",
|
||||
"create_post",
|
||||
"reply_to_post",
|
||||
"reply_to_comment",
|
||||
"recent_posts",
|
||||
"get_post",
|
||||
"list_unread_messages",
|
||||
"mark_notifications_read",
|
||||
"create_post",
|
||||
];
|
||||
|
||||
protected readonly openisleToken = (process.env.OPENISLE_TOKEN ?? "").trim();
|
||||
|
||||
protected readonly mcp = this.createHostedMcpTool();
|
||||
protected readonly agent: Agent;
|
||||
|
||||
constructor(protected readonly name: string) {
|
||||
console.log(`✅ ${this.name} starting...`);
|
||||
console.log(
|
||||
"🛠️ Configured Hosted MCP tools:",
|
||||
this.allowedMcpTools.join(", ")
|
||||
);
|
||||
|
||||
console.log(
|
||||
this.openisleToken
|
||||
? "🔑 OPENISLE_TOKEN detected in environment; it will be attached to MCP requests."
|
||||
: "🔓 OPENISLE_TOKEN not set; authenticated MCP tools may be unavailable."
|
||||
);
|
||||
|
||||
this.agent = new Agent({
|
||||
name: this.name,
|
||||
instructions: this.buildInstructions(),
|
||||
tools: [this.mcp],
|
||||
model: "gpt-4o",
|
||||
modelSettings: {
|
||||
temperature: 0.7,
|
||||
topP: 1,
|
||||
maxTokens: 2048,
|
||||
toolChoice: "auto",
|
||||
store: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
protected buildInstructions(): string {
|
||||
const instructions = [
|
||||
...this.getBaseInstructions(),
|
||||
...this.getAdditionalInstructions(),
|
||||
].filter(Boolean);
|
||||
return instructions.join("\n");
|
||||
}
|
||||
|
||||
protected getBaseInstructions(): string[] {
|
||||
return [
|
||||
"You are a helpful assistant for https://www.open-isle.com.",
|
||||
"Finish tasks end-to-end before replying. If multiple MCP tools are needed, call them sequentially until the task is truly done.",
|
||||
"When presenting the result, reply in Chinese with a concise summary and include any important URLs or IDs.",
|
||||
"After finishing replies, call mark_notifications_read with all processed notification IDs to keep the inbox clean.",
|
||||
];
|
||||
}
|
||||
|
||||
private createHostedMcpTool() {
|
||||
const token = this.openisleToken;
|
||||
const authConfig = token
|
||||
? {
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
}
|
||||
: {};
|
||||
|
||||
return hostedMcpTool({
|
||||
serverLabel: "openisle_mcp",
|
||||
serverUrl: "https://www.open-isle.com/mcp",
|
||||
allowedTools: this.allowedMcpTools,
|
||||
requireApproval: "never",
|
||||
...authConfig,
|
||||
});
|
||||
}
|
||||
|
||||
protected getAdditionalInstructions(): string[] {
|
||||
return [];
|
||||
}
|
||||
|
||||
protected createRunner(): Runner {
|
||||
return new Runner({
|
||||
workflowName: this.name,
|
||||
traceMetadata: {
|
||||
__trace_source__: "agent-builder",
|
||||
workflow_id: "wf_69003cbd47e08190928745d3c806c0b50d1a01cfae052be8",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
public async runWorkflow(workflow: WorkflowInput) {
|
||||
if (!process.env.OPENAI_API_KEY) {
|
||||
throw new Error("Missing OPENAI_API_KEY");
|
||||
}
|
||||
|
||||
const runner = this.createRunner();
|
||||
|
||||
return await withTrace(`${this.name} run`, async () => {
|
||||
const preview = workflow.input_as_text.trim();
|
||||
console.log(
|
||||
"📝 Received workflow input (preview):",
|
||||
preview.length > 200 ? `${preview.slice(0, 200)}…` : preview
|
||||
);
|
||||
|
||||
console.log("🚦 Starting agent run with maxTurns=16...");
|
||||
const result = await runner.run(this.agent, workflow.input_as_text, {
|
||||
maxTurns: 16,
|
||||
});
|
||||
|
||||
console.log("📬 Agent run completed. Result keys:", Object.keys(result));
|
||||
|
||||
if (!result.finalOutput) {
|
||||
throw new Error("Agent result is undefined (no final output).");
|
||||
}
|
||||
|
||||
const openisleBotResult = { output_text: String(result.finalOutput) };
|
||||
|
||||
console.log(
|
||||
"🤖 Agent result (length=%d):\n%s",
|
||||
openisleBotResult.output_text.length,
|
||||
openisleBotResult.output_text
|
||||
);
|
||||
return openisleBotResult;
|
||||
});
|
||||
}
|
||||
|
||||
protected abstract getCliQuery(): string;
|
||||
|
||||
public async runCli(): Promise<void> {
|
||||
try {
|
||||
const query = this.getCliQuery();
|
||||
console.log("🔍 Running workflow...");
|
||||
await this.runWorkflow({ input_as_text: query });
|
||||
process.exit(0);
|
||||
} catch (err: any) {
|
||||
console.error("❌ Agent failed:", err?.stack || err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
63
bots/instance/coffee_bot.ts
Normal file
63
bots/instance/coffee_bot.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { BotFather, WorkflowInput } from "../bot_father";
|
||||
|
||||
const WEEKDAY_NAMES = ["日", "一", "二", "三", "四", "五", "六"] as const;
|
||||
|
||||
class CoffeeBot extends BotFather {
|
||||
constructor() {
|
||||
super("Coffee Bot");
|
||||
}
|
||||
|
||||
protected override getAdditionalInstructions(): string[] {
|
||||
return [
|
||||
"You are responsible for 发布每日抽奖早安贴。",
|
||||
"创建帖子时,确保标题、奖品信息、开奖时间以及领奖方式完全符合 CLI 查询提供的细节。",
|
||||
"正文需亲切友好,简洁明了,鼓励社区成员互动。",
|
||||
"开奖说明需明确告知中奖者需私聊站长 @nagisa 领取奖励。",
|
||||
"确保只发布一个帖子,避免重复调用 create_post。",
|
||||
];
|
||||
}
|
||||
|
||||
protected override getCliQuery(): string {
|
||||
const now = new Date();
|
||||
const beijingNow = new Date(
|
||||
now.toLocaleString("en-US", { timeZone: "Asia/Shanghai" })
|
||||
);
|
||||
const weekday = WEEKDAY_NAMES[beijingNow.getDay()];
|
||||
|
||||
const drawTime = new Date(beijingNow);
|
||||
drawTime.setHours(15, 0, 0, 0);
|
||||
const drawTimeText = drawTime
|
||||
.toLocaleTimeString("zh-CN", {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
hour12: false,
|
||||
timeZone: "Asia/Shanghai",
|
||||
})
|
||||
.replace(/^24:/, "00:");
|
||||
|
||||
return `
|
||||
请立即在 https://www.open-isle.com 使用 create_post 发表一篇全新帖子,遵循以下要求:
|
||||
1. 标题固定为「大家星期${weekday}早安--抽一杯咖啡」。
|
||||
2. 正文包含:
|
||||
- 亲切的早安问候;
|
||||
- 明确奖品写作“Coffee x 1”;
|
||||
- 奖品图片链接:https://openisle-1307107697.cos.accelerate.myqcloud.com/dynamic_assert/0d6a9b33e9ca4fe5a90540187d3f9ecb.png;
|
||||
- 公布开奖时间为今天下午 15:00(北京时间,写成 ${drawTimeText});
|
||||
- 标注“领奖请私聊站长 @nagisa”;
|
||||
- 鼓励大家留言互动。
|
||||
3. 帖子语言使用简体中文,格式可用 Markdown,使关键信息醒目。
|
||||
4. 完成后只输出“已发布咖啡抽奖贴”,不额外生成总结。
|
||||
`.trim();
|
||||
}
|
||||
}
|
||||
|
||||
const coffeeBot = new CoffeeBot();
|
||||
|
||||
export const runWorkflow = async (workflow: WorkflowInput) => {
|
||||
return coffeeBot.runWorkflow(workflow);
|
||||
};
|
||||
|
||||
if (require.main === module) {
|
||||
coffeeBot.runCli();
|
||||
}
|
||||
|
||||
39
bots/instance/reply_bot.ts
Normal file
39
bots/instance/reply_bot.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
// reply_bot.ts
|
||||
import { BotFather, WorkflowInput } from "../bot_father";
|
||||
|
||||
class ReplyBot extends BotFather {
|
||||
constructor() {
|
||||
super("OpenIsle Bot");
|
||||
}
|
||||
|
||||
protected override getAdditionalInstructions(): string[] {
|
||||
return [
|
||||
"You are a helpful and cute assistant for https://www.open-isle.com. Keep the lovable tone with plentiful kawaii kaomoji (颜表情) such as (๑˃ᴗ˂)ﻭ, (•̀ω•́)✧, (。•ᴗ-)_♡, (⁎⁍̴̛ᴗ⁍̴̛⁎), etc., while staying professional and informative.",
|
||||
"OpenIsle 是一个由 Spring Boot + Vue 3 打造的开源社区平台,提供注册登录、OAuth 登录(Google/GitHub/Discord/Twitter)、帖子与评论互动、标签分类、草稿、统计分析、通知消息、全局搜索、Markdown 支持、图片上传(默认腾讯云 COS)、浏览器推送、DiceBear 头像等功能,旨在帮助团队快速搭建属于自己的技术社区。",
|
||||
"回复时请主动结合上述站点背景,为用户提供有洞察力、可执行的建议或答案,并在需要时引用官网 https://www.open-isle.com、GitHub 仓库 https://github.com/nagisa77/OpenIsle 或相关文档链接,避免空泛的安慰或套话。",
|
||||
"When presenting the result, reply in Chinese with a concise yet content-rich summary filled with kaomoji,并清晰列出关键结论、操作步骤、重要 URL 或 ID,确保用户能直接采取行动。",
|
||||
];
|
||||
}
|
||||
|
||||
protected override getCliQuery(): string {
|
||||
return `
|
||||
【AUTO】无需确认,自动处理所有未读的提及与评论:
|
||||
1)调用 list_unread_messages;
|
||||
2)依次处理每条“提及/评论”:如需上下文则使用 get_post 获取,生成简明中文回复;如有 commentId 则用 reply_to_comment,否则用 reply_to_post;
|
||||
3)跳过关注和系统事件;
|
||||
4)保证幂等性:如该贴最后一条是你自己发的回复,则跳过;
|
||||
5)调用 mark_notifications_read,传入本次已处理的通知 ID 清理已读;
|
||||
6)最多只处理最新10条;结束时仅输出简要摘要(包含URL或ID)。
|
||||
`.trim();
|
||||
}
|
||||
}
|
||||
|
||||
const replyBot = new ReplyBot();
|
||||
|
||||
export const runWorkflow = async (workflow: WorkflowInput) => {
|
||||
return replyBot.runWorkflow(workflow);
|
||||
};
|
||||
|
||||
if (require.main === module) {
|
||||
replyBot.runCli();
|
||||
}
|
||||
@@ -40,12 +40,12 @@ echo "👉 Build images ..."
|
||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||
build --pull \
|
||||
--build-arg NUXT_ENV=production \
|
||||
frontend_service
|
||||
frontend_service mcp
|
||||
|
||||
echo "👉 Recreate & start all target services (no dev profile)..."
|
||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||
up -d --force-recreate --remove-orphans --no-deps \
|
||||
mysql redis rabbitmq websocket-service springboot frontend_service
|
||||
mysql redis rabbitmq websocket-service springboot frontend_service mcp
|
||||
|
||||
echo "👉 Current status:"
|
||||
docker compose -f "$compose_file" --env-file "$env_file" ps
|
||||
|
||||
@@ -39,12 +39,12 @@ echo "👉 Build images (staging)..."
|
||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||
build --pull \
|
||||
--build-arg NUXT_ENV=staging \
|
||||
frontend_service
|
||||
frontend_service mcp
|
||||
|
||||
echo "👉 Recreate & start all target services (no dev profile)..."
|
||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||
up -d --force-recreate --remove-orphans --no-deps \
|
||||
mysql redis rabbitmq websocket-service springboot frontend_service
|
||||
mysql redis rabbitmq websocket-service springboot frontend_service mcp
|
||||
|
||||
echo "👉 Current status:"
|
||||
docker compose -f "$compose_file" --env-file "$env_file" ps
|
||||
|
||||
@@ -178,6 +178,32 @@ services:
|
||||
- dev
|
||||
- prod
|
||||
|
||||
mcp:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: docker/mcp.Dockerfile
|
||||
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
|
||||
env_file:
|
||||
- ${ENV_FILE:-../.env}
|
||||
environment:
|
||||
OPENISLE_MCP_BACKEND_BASE_URL: http://springboot:${SERVER_PORT:-8080}
|
||||
OPENISLE_MCP_HOST: 0.0.0.0
|
||||
OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8085}
|
||||
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-streamable-http}
|
||||
OPENISLE_MCP_REQUEST_TIMEOUT: ${OPENISLE_MCP_REQUEST_TIMEOUT:-10.0}
|
||||
ports:
|
||||
- "${OPENISLE_MCP_PORT:-8085}:${OPENISLE_MCP_PORT:-8085}"
|
||||
depends_on:
|
||||
springboot:
|
||||
condition: service_started
|
||||
networks:
|
||||
- openisle-network
|
||||
profiles:
|
||||
- dev
|
||||
- dev_local_backend
|
||||
- prod
|
||||
|
||||
|
||||
websocket-service:
|
||||
image: maven:3.9-eclipse-temurin-17
|
||||
container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket
|
||||
@@ -213,32 +239,6 @@ services:
|
||||
- dev_local_backend
|
||||
- prod
|
||||
|
||||
mcp-service:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: mcp/Dockerfile
|
||||
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
|
||||
env_file:
|
||||
- ${ENV_FILE:-../.env}
|
||||
environment:
|
||||
FASTMCP_HOST: 0.0.0.0
|
||||
FASTMCP_PORT: ${MCP_PORT:-8765}
|
||||
OPENISLE_BACKEND_URL: ${OPENISLE_BACKEND_URL:-http://springboot:8080}
|
||||
OPENISLE_BACKEND_TIMEOUT: ${OPENISLE_BACKEND_TIMEOUT:-10}
|
||||
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-sse}
|
||||
OPENISLE_MCP_SSE_MOUNT_PATH: ${OPENISLE_MCP_SSE_MOUNT_PATH:-/mcp}
|
||||
ports:
|
||||
- "${MCP_PORT:-8765}:${MCP_PORT:-8765}"
|
||||
depends_on:
|
||||
springboot:
|
||||
condition: service_healthy
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- openisle-network
|
||||
profiles:
|
||||
- dev
|
||||
- prod
|
||||
|
||||
frontend_dev:
|
||||
image: node:20
|
||||
container_name: ${COMPOSE_PROJECT_NAME}-openisle-frontend-dev
|
||||
|
||||
21
docker/mcp.Dockerfile
Normal file
21
docker/mcp.Dockerfile
Normal file
@@ -0,0 +1,21 @@
|
||||
FROM python:3.11-slim AS base
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY mcp/pyproject.toml mcp/README.md ./
|
||||
COPY mcp/src ./src
|
||||
|
||||
RUN pip install --no-cache-dir --upgrade pip \
|
||||
&& pip install --no-cache-dir .
|
||||
|
||||
ENV OPENISLE_MCP_HOST=0.0.0.0 \
|
||||
OPENISLE_MCP_PORT=8085 \
|
||||
OPENISLE_MCP_TRANSPORT=streamable-http
|
||||
|
||||
EXPOSE 8085
|
||||
|
||||
CMD ["openisle-mcp"]
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
FROM python:3.11-slim AS runtime
|
||||
|
||||
ENV PYTHONUNBUFFERED=1 \
|
||||
PIP_NO_CACHE_DIR=1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY mcp/pyproject.toml /app/pyproject.toml
|
||||
COPY mcp/README.md /app/README.md
|
||||
COPY mcp/src /app/src
|
||||
|
||||
RUN pip install --upgrade pip \
|
||||
&& pip install .
|
||||
|
||||
EXPOSE 8765
|
||||
|
||||
CMD ["openisle-mcp"]
|
||||
@@ -1,39 +1,42 @@
|
||||
# OpenIsle MCP Server
|
||||
|
||||
This package provides a [Model Context Protocol](https://github.com/modelcontextprotocol) (MCP) server that exposes the OpenIsle
|
||||
search capabilities to AI assistants. The server wraps the existing Spring Boot backend and currently provides a single `search`
|
||||
tool. Future iterations can extend the server with additional functionality such as publishing new posts or moderating content.
|
||||
This package provides a [Model Context Protocol](https://modelcontextprotocol.io) (MCP) server
|
||||
that exposes OpenIsle's search capabilities as MCP tools. The initial release focuses on the
|
||||
global search endpoint so the agent ecosystem can retrieve relevant posts, users, tags, and
|
||||
other resources.
|
||||
|
||||
## Features
|
||||
## Configuration
|
||||
|
||||
- 🔍 **Global search** — delegates to the existing `/api/search/global` endpoint exposed by the OpenIsle backend.
|
||||
- 🧠 **Structured results** — responses include highlights and deep links so AI clients can present the results cleanly.
|
||||
- ⚙️ **Configurable** — point the server at any reachable OpenIsle backend by setting environment variables.
|
||||
The server is configured through environment variables (all prefixed with `OPENISLE_MCP_`):
|
||||
|
||||
## Local development
|
||||
| Variable | Default | Description |
|
||||
| --- | --- | --- |
|
||||
| `BACKEND_BASE_URL` | `http://springboot:8080` | Base URL of the OpenIsle backend. |
|
||||
| `PORT` | `8085` | TCP port when running with the `streamable-http` transport. |
|
||||
| `HOST` | `0.0.0.0` | Interface to bind when serving HTTP. |
|
||||
| `TRANSPORT` | `streamable-http` | Transport to use (`stdio`, `sse`, or `streamable-http`). |
|
||||
| `REQUEST_TIMEOUT` | `10.0` | Timeout (seconds) for backend HTTP requests. |
|
||||
|
||||
## Running locally
|
||||
|
||||
```bash
|
||||
cd mcp
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install -e .
|
||||
openisle-mcp --transport stdio # or "sse"/"streamable-http"
|
||||
pip install .
|
||||
OPENISLE_MCP_BACKEND_BASE_URL="http://localhost:8080" openisle-mcp
|
||||
```
|
||||
|
||||
Environment variables:
|
||||
By default the server listens on port `8085` and serves MCP over Streamable HTTP.
|
||||
|
||||
| Variable | Description | Default |
|
||||
| --- | --- | --- |
|
||||
| `OPENISLE_BACKEND_URL` | Base URL of the Spring Boot backend | `http://springboot:8080` |
|
||||
| `OPENISLE_BACKEND_TIMEOUT` | Timeout (seconds) for backend HTTP calls | `10` |
|
||||
| `OPENISLE_PUBLIC_BASE_URL` | Optional base URL used to build deep links in search results | *(unset)* |
|
||||
| `OPENISLE_MCP_TRANSPORT` | MCP transport (`stdio`, `sse`, `streamable-http`) | `stdio` |
|
||||
| `OPENISLE_MCP_SSE_MOUNT_PATH` | Mount path when using SSE transport | `/mcp` |
|
||||
| `FASTMCP_HOST` | Host for SSE / HTTP transports | `127.0.0.1` |
|
||||
| `FASTMCP_PORT` | Port for SSE / HTTP transports | `8000` |
|
||||
## Available tools
|
||||
|
||||
## Docker
|
||||
| Tool | Description |
|
||||
| --- | --- |
|
||||
| `search` | Perform a global search against the OpenIsle backend. |
|
||||
| `create_post` | Publish a new post using a JWT token. |
|
||||
| `reply_to_post` | Create a new comment on a post using a JWT token. |
|
||||
| `reply_to_comment` | Reply to an existing comment using a JWT token. |
|
||||
| `recent_posts` | Retrieve posts created within the last *N* minutes. |
|
||||
|
||||
A dedicated Docker image is provided and wired into `docker-compose.yaml`. The container listens on
|
||||
`${MCP_PORT:-8765}` and connects to the backend service running in the same compose stack.
|
||||
The tools return structured data mirroring the backend DTOs, including highlighted snippets for
|
||||
search results, the full comment payload for post replies and comment replies, and detailed
|
||||
metadata for recent posts.
|
||||
|
||||
|
||||
@@ -1,29 +1,27 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=68", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
requires = ["hatchling>=1.25"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "openisle-mcp"
|
||||
version = "0.1.0"
|
||||
description = "Model Context Protocol server exposing OpenIsle search capabilities"
|
||||
description = "Model Context Protocol server exposing OpenIsle search capabilities."
|
||||
readme = "README.md"
|
||||
authors = [{name = "OpenIsle Team"}]
|
||||
license = {text = "MIT"}
|
||||
authors = [{ name = "OpenIsle", email = "engineering@openisle.example" }]
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"mcp>=1.19.0",
|
||||
"httpx>=0.28.0",
|
||||
"pydantic>=2.12.0",
|
||||
"httpx>=0.28,<0.29",
|
||||
"pydantic>=2.12,<3",
|
||||
"pydantic-settings>=2.11,<3"
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
openisle-mcp = "openisle_mcp.server:main"
|
||||
|
||||
[tool.setuptools]
|
||||
package-dir = {"" = "src"}
|
||||
[tool.hatch.build]
|
||||
packages = ["src/openisle_mcp"]
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["src"]
|
||||
[tool.ruff]
|
||||
line-length = 100
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
openisle_mcp = ["py.typed"]
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
"""OpenIsle MCP server package."""
|
||||
|
||||
from importlib import metadata
|
||||
from .config import Settings, get_settings
|
||||
|
||||
try:
|
||||
__version__ = metadata.version("openisle-mcp")
|
||||
except metadata.PackageNotFoundError: # pragma: no cover - best effort during dev
|
||||
__version__ = "0.0.0"
|
||||
__all__ = ["Settings", "get_settings"]
|
||||
|
||||
__all__ = ["__version__"]
|
||||
|
||||
@@ -1,79 +0,0 @@
|
||||
"""HTTP client for talking to the OpenIsle backend."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import List
|
||||
|
||||
import httpx
|
||||
from pydantic import ValidationError
|
||||
|
||||
from .models import BackendSearchResult
|
||||
|
||||
__all__ = ["BackendClientError", "OpenIsleBackendClient"]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BackendClientError(RuntimeError):
|
||||
"""Raised when the backend cannot fulfil a request."""
|
||||
|
||||
|
||||
class OpenIsleBackendClient:
|
||||
"""Tiny wrapper around the Spring Boot search endpoints."""
|
||||
|
||||
def __init__(self, base_url: str, timeout: float = 10.0) -> None:
|
||||
if not base_url:
|
||||
raise ValueError("base_url must not be empty")
|
||||
self._base_url = base_url.rstrip("/")
|
||||
timeout = timeout if timeout > 0 else 10.0
|
||||
self._timeout = httpx.Timeout(timeout, connect=timeout, read=timeout)
|
||||
|
||||
@property
|
||||
def base_url(self) -> str:
|
||||
return self._base_url
|
||||
|
||||
async def search_global(self, keyword: str) -> List[BackendSearchResult]:
|
||||
"""Call `/api/search/global` and normalise the payload."""
|
||||
|
||||
url = f"{self._base_url}/api/search/global"
|
||||
params = {"keyword": keyword}
|
||||
headers = {"Accept": "application/json"}
|
||||
logger.debug("Calling OpenIsle backend", extra={"url": url, "params": params})
|
||||
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=self._timeout, headers=headers, follow_redirects=True) as client:
|
||||
response = await client.get(url, params=params)
|
||||
response.raise_for_status()
|
||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors are rare in tests
|
||||
body_preview = _truncate_body(exc.response.text)
|
||||
raise BackendClientError(
|
||||
f"Backend returned HTTP {exc.response.status_code}: {body_preview}"
|
||||
) from exc
|
||||
except httpx.RequestError as exc: # pragma: no cover - network errors are rare in tests
|
||||
raise BackendClientError(f"Failed to reach backend: {exc}") from exc
|
||||
|
||||
try:
|
||||
payload = response.json()
|
||||
except json.JSONDecodeError as exc:
|
||||
raise BackendClientError("Backend returned invalid JSON") from exc
|
||||
|
||||
if not isinstance(payload, list):
|
||||
raise BackendClientError("Unexpected search payload type; expected a list")
|
||||
|
||||
results: list[BackendSearchResult] = []
|
||||
for item in payload:
|
||||
try:
|
||||
results.append(BackendSearchResult.model_validate(item))
|
||||
except ValidationError as exc:
|
||||
raise BackendClientError(f"Invalid search result payload: {exc}") from exc
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def _truncate_body(body: str, limit: int = 200) -> str:
|
||||
body = body.strip()
|
||||
if len(body) <= limit:
|
||||
return body
|
||||
return f"{body[:limit]}…"
|
||||
66
mcp/src/openisle_mcp/config.py
Normal file
66
mcp/src/openisle_mcp/config.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""Application configuration helpers for the OpenIsle MCP server."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import lru_cache
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import Field, SecretStr
|
||||
from pydantic.networks import AnyHttpUrl
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
"""Configuration for the MCP server."""
|
||||
|
||||
backend_base_url: AnyHttpUrl = Field(
|
||||
"http://springboot:8080",
|
||||
description="Base URL for the OpenIsle backend service.",
|
||||
)
|
||||
host: str = Field(
|
||||
"0.0.0.0",
|
||||
description="Host interface to bind when running with HTTP transports.",
|
||||
)
|
||||
port: int = Field(
|
||||
8085,
|
||||
ge=1,
|
||||
le=65535,
|
||||
description="TCP port for HTTP transports.",
|
||||
)
|
||||
transport: Literal["stdio", "sse", "streamable-http"] = Field(
|
||||
"streamable-http",
|
||||
description="MCP transport to use when running the server.",
|
||||
)
|
||||
request_timeout: float = Field(
|
||||
10.0,
|
||||
gt=0,
|
||||
description="Timeout (seconds) for backend search requests.",
|
||||
)
|
||||
access_token: SecretStr | None = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional JWT bearer token used for authenticated backend calls. "
|
||||
"When set, tools that support authentication will use this token "
|
||||
"automatically unless an explicit token override is provided."
|
||||
),
|
||||
)
|
||||
log_level: str = Field(
|
||||
"INFO",
|
||||
description=(
|
||||
"Logging level for the MCP server (e.g. DEBUG, INFO, WARNING)."
|
||||
),
|
||||
)
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
env_prefix="OPENISLE_MCP_",
|
||||
env_file=".env",
|
||||
env_file_encoding="utf-8",
|
||||
case_sensitive=False,
|
||||
)
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def get_settings() -> Settings:
|
||||
"""Return cached application settings."""
|
||||
|
||||
return Settings()
|
||||
@@ -1,58 +0,0 @@
|
||||
"""Pydantic models used by the OpenIsle MCP server."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Dict, Optional
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
__all__ = [
|
||||
"BackendSearchResult",
|
||||
"SearchResult",
|
||||
"SearchResponse",
|
||||
]
|
||||
|
||||
|
||||
class BackendSearchResult(BaseModel):
|
||||
"""Shape of the payload returned by the OpenIsle backend."""
|
||||
|
||||
type: str
|
||||
id: Optional[int] = None
|
||||
text: Optional[str] = None
|
||||
sub_text: Optional[str] = Field(default=None, alias="subText")
|
||||
extra: Optional[str] = None
|
||||
post_id: Optional[int] = Field(default=None, alias="postId")
|
||||
highlighted_text: Optional[str] = Field(default=None, alias="highlightedText")
|
||||
highlighted_sub_text: Optional[str] = Field(default=None, alias="highlightedSubText")
|
||||
highlighted_extra: Optional[str] = Field(default=None, alias="highlightedExtra")
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="ignore")
|
||||
|
||||
|
||||
class SearchResult(BaseModel):
|
||||
"""Structured search result returned to MCP clients."""
|
||||
|
||||
type: str = Field(description="Entity type, e.g. post, comment, user")
|
||||
id: Optional[int] = Field(default=None, description="Primary identifier for the entity")
|
||||
title: Optional[str] = Field(default=None, description="Primary text to display")
|
||||
subtitle: Optional[str] = Field(default=None, description="Secondary text (e.g. author or category)")
|
||||
extra: Optional[str] = Field(default=None, description="Additional descriptive snippet")
|
||||
post_id: Optional[int] = Field(default=None, description="Associated post id for comment results")
|
||||
url: Optional[str] = Field(default=None, description="Deep link to the resource inside OpenIsle")
|
||||
highlights: Dict[str, Optional[str]] = Field(
|
||||
default_factory=dict,
|
||||
description="Highlighted HTML fragments keyed by field name",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True)
|
||||
|
||||
|
||||
class SearchResponse(BaseModel):
|
||||
"""Response envelope returned from the MCP search tool."""
|
||||
|
||||
keyword: str = Field(description="Sanitised keyword that was searched for")
|
||||
total_results: int = Field(description="Total number of results returned by the backend")
|
||||
limit: int = Field(description="Maximum number of results included in the response")
|
||||
results: list[SearchResult] = Field(default_factory=list, description="Search results up to the requested limit")
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True)
|
||||
378
mcp/src/openisle_mcp/schemas.py
Normal file
378
mcp/src/openisle_mcp/schemas.py
Normal file
@@ -0,0 +1,378 @@
|
||||
"""Pydantic models describing tool inputs and outputs."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic import BaseModel, Field, ConfigDict, field_validator
|
||||
|
||||
|
||||
class SearchResultItem(BaseModel):
|
||||
"""A single search result entry."""
|
||||
|
||||
type: str = Field(description="Entity type for the result (post, user, tag, etc.).")
|
||||
id: Optional[int] = Field(default=None, description="Identifier of the matched entity.")
|
||||
text: Optional[str] = Field(default=None, description="Primary text associated with the result.")
|
||||
sub_text: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="subText",
|
||||
description="Secondary text, e.g. a username or excerpt.",
|
||||
)
|
||||
extra: Optional[str] = Field(default=None, description="Additional contextual information.")
|
||||
post_id: Optional[int] = Field(
|
||||
default=None,
|
||||
alias="postId",
|
||||
description="Associated post identifier when relevant.",
|
||||
)
|
||||
highlighted_text: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="highlightedText",
|
||||
description="Highlighted snippet of the primary text if available.",
|
||||
)
|
||||
highlighted_sub_text: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="highlightedSubText",
|
||||
description="Highlighted snippet of the secondary text if available.",
|
||||
)
|
||||
highlighted_extra: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="highlightedExtra",
|
||||
description="Highlighted snippet of extra information if available.",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True)
|
||||
|
||||
|
||||
class SearchResponse(BaseModel):
|
||||
"""Structured response returned by the search tool."""
|
||||
|
||||
keyword: str = Field(description="The keyword that was searched.")
|
||||
total: int = Field(description="Total number of matches returned by the backend.")
|
||||
results: list[SearchResultItem] = Field(
|
||||
default_factory=list,
|
||||
description="Ordered collection of search results.",
|
||||
)
|
||||
|
||||
|
||||
class AuthorInfo(BaseModel):
|
||||
"""Summary of a post or comment author."""
|
||||
|
||||
id: Optional[int] = Field(default=None, description="Author identifier.")
|
||||
username: Optional[str] = Field(default=None, description="Author username.")
|
||||
avatar: Optional[str] = Field(default=None, description="URL of the author's avatar.")
|
||||
display_medal: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="displayMedal",
|
||||
description="Medal displayed next to the author, when available.",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
|
||||
class CategoryInfo(BaseModel):
|
||||
"""Basic information about a post category."""
|
||||
|
||||
id: Optional[int] = Field(default=None, description="Category identifier.")
|
||||
name: Optional[str] = Field(default=None, description="Category name.")
|
||||
description: Optional[str] = Field(
|
||||
default=None, description="Human friendly description of the category."
|
||||
)
|
||||
icon: Optional[str] = Field(default=None, description="Icon URL associated with the category.")
|
||||
small_icon: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="smallIcon",
|
||||
description="Compact icon URL for the category.",
|
||||
)
|
||||
count: Optional[int] = Field(default=None, description="Number of posts within the category.")
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
|
||||
class TagInfo(BaseModel):
|
||||
"""Details for a tag assigned to a post."""
|
||||
|
||||
id: Optional[int] = Field(default=None, description="Tag identifier.")
|
||||
name: Optional[str] = Field(default=None, description="Tag name.")
|
||||
description: Optional[str] = Field(default=None, description="Description of the tag.")
|
||||
icon: Optional[str] = Field(default=None, description="Icon URL for the tag.")
|
||||
small_icon: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="smallIcon",
|
||||
description="Compact icon URL for the tag.",
|
||||
)
|
||||
created_at: Optional[datetime] = Field(
|
||||
default=None,
|
||||
alias="createdAt",
|
||||
description="When the tag was created.",
|
||||
)
|
||||
count: Optional[int] = Field(default=None, description="Number of posts using the tag.")
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
|
||||
class ReactionInfo(BaseModel):
|
||||
"""Representation of a reaction on a post or comment."""
|
||||
|
||||
id: Optional[int] = Field(default=None, description="Reaction identifier.")
|
||||
type: Optional[str] = Field(default=None, description="Reaction type (emoji, like, etc.).")
|
||||
user: Optional[str] = Field(default=None, description="Username of the reacting user.")
|
||||
post_id: Optional[int] = Field(
|
||||
default=None,
|
||||
alias="postId",
|
||||
description="Related post identifier when applicable.",
|
||||
)
|
||||
comment_id: Optional[int] = Field(
|
||||
default=None,
|
||||
alias="commentId",
|
||||
description="Related comment identifier when applicable.",
|
||||
)
|
||||
message_id: Optional[int] = Field(
|
||||
default=None,
|
||||
alias="messageId",
|
||||
description="Related message identifier when applicable.",
|
||||
)
|
||||
reward: Optional[int] = Field(default=None, description="Reward granted for the reaction, if any.")
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
|
||||
class CommentData(BaseModel):
|
||||
"""Comment information returned by the backend."""
|
||||
|
||||
id: Optional[int] = Field(default=None, description="Comment identifier.")
|
||||
content: Optional[str] = Field(default=None, description="Markdown content of the comment.")
|
||||
created_at: Optional[datetime] = Field(
|
||||
default=None,
|
||||
alias="createdAt",
|
||||
description="Timestamp when the comment was created.",
|
||||
)
|
||||
pinned_at: Optional[datetime] = Field(
|
||||
default=None,
|
||||
alias="pinnedAt",
|
||||
description="Timestamp when the comment was pinned, if applicable.",
|
||||
)
|
||||
author: Optional[AuthorInfo] = Field(default=None, description="Author of the comment.")
|
||||
replies: list["CommentData"] = Field(
|
||||
default_factory=list,
|
||||
description="Nested replies associated with the comment.",
|
||||
)
|
||||
reactions: list[ReactionInfo] = Field(
|
||||
default_factory=list,
|
||||
description="Reactions applied to the comment.",
|
||||
)
|
||||
reward: Optional[int] = Field(default=None, description="Reward gained by posting the comment.")
|
||||
point_reward: Optional[int] = Field(
|
||||
default=None,
|
||||
alias="pointReward",
|
||||
description="Points rewarded for the comment.",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
@field_validator("replies", "reactions", mode="before")
|
||||
@classmethod
|
||||
def _ensure_comment_lists(cls, value: Any) -> list[Any]:
|
||||
"""Convert ``None`` payloads to empty lists for comment collections."""
|
||||
|
||||
if value is None:
|
||||
return []
|
||||
return value
|
||||
|
||||
|
||||
class CommentReplyResult(BaseModel):
|
||||
"""Structured response returned when replying to a comment."""
|
||||
|
||||
comment: CommentData = Field(description="Reply comment returned by the backend.")
|
||||
|
||||
|
||||
class CommentCreateResult(BaseModel):
|
||||
"""Structured response returned when creating a comment on a post."""
|
||||
|
||||
comment: CommentData = Field(description="Comment returned by the backend.")
|
||||
|
||||
|
||||
class PostCreateResult(BaseModel):
|
||||
"""Structured response returned when creating a new post."""
|
||||
|
||||
post: PostDetail = Field(description="Detailed post payload returned by the backend.")
|
||||
|
||||
|
||||
class PostSummary(BaseModel):
|
||||
"""Summary information for a post."""
|
||||
|
||||
id: Optional[int] = Field(default=None, description="Post identifier.")
|
||||
title: Optional[str] = Field(default=None, description="Title of the post.")
|
||||
content: Optional[str] = Field(default=None, description="Excerpt or content of the post.")
|
||||
created_at: Optional[datetime] = Field(
|
||||
default=None,
|
||||
alias="createdAt",
|
||||
description="When the post was created.",
|
||||
)
|
||||
author: Optional[AuthorInfo] = Field(default=None, description="Author who created the post.")
|
||||
category: Optional[CategoryInfo] = Field(default=None, description="Category of the post.")
|
||||
tags: list[TagInfo] = Field(default_factory=list, description="Tags assigned to the post.")
|
||||
views: Optional[int] = Field(default=None, description="Total view count for the post.")
|
||||
comment_count: Optional[int] = Field(
|
||||
default=None,
|
||||
alias="commentCount",
|
||||
description="Number of comments on the post.",
|
||||
)
|
||||
status: Optional[str] = Field(default=None, description="Workflow status of the post.")
|
||||
pinned_at: Optional[datetime] = Field(
|
||||
default=None,
|
||||
alias="pinnedAt",
|
||||
description="When the post was pinned, if ever.",
|
||||
)
|
||||
last_reply_at: Optional[datetime] = Field(
|
||||
default=None,
|
||||
alias="lastReplyAt",
|
||||
description="Timestamp of the most recent reply.",
|
||||
)
|
||||
reactions: list[ReactionInfo] = Field(
|
||||
default_factory=list,
|
||||
description="Reactions received by the post.",
|
||||
)
|
||||
participants: list[AuthorInfo] = Field(
|
||||
default_factory=list,
|
||||
description="Users participating in the discussion.",
|
||||
)
|
||||
subscribed: Optional[bool] = Field(
|
||||
default=None,
|
||||
description="Whether the current user is subscribed to the post.",
|
||||
)
|
||||
reward: Optional[int] = Field(default=None, description="Reward granted for the post.")
|
||||
point_reward: Optional[int] = Field(
|
||||
default=None,
|
||||
alias="pointReward",
|
||||
description="Points granted for the post.",
|
||||
)
|
||||
type: Optional[str] = Field(default=None, description="Type of the post.")
|
||||
lottery: Optional[dict[str, Any]] = Field(
|
||||
default=None, description="Lottery information for the post."
|
||||
)
|
||||
poll: Optional[dict[str, Any]] = Field(
|
||||
default=None, description="Poll information for the post."
|
||||
)
|
||||
rss_excluded: Optional[bool] = Field(
|
||||
default=None,
|
||||
alias="rssExcluded",
|
||||
description="Whether the post is excluded from RSS feeds.",
|
||||
)
|
||||
closed: Optional[bool] = Field(default=None, description="Whether the post is closed for replies.")
|
||||
visible_scope: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="visibleScope",
|
||||
description="Visibility scope configuration for the post.",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
@field_validator("tags", "reactions", "participants", mode="before")
|
||||
@classmethod
|
||||
def _ensure_post_lists(cls, value: Any) -> list[Any]:
|
||||
"""Normalize ``None`` values returned by the backend to empty lists."""
|
||||
|
||||
if value is None:
|
||||
return []
|
||||
return value
|
||||
|
||||
|
||||
class RecentPostsResponse(BaseModel):
|
||||
"""Structured response for the recent posts tool."""
|
||||
|
||||
minutes: int = Field(description="Time window, in minutes, used for the query.")
|
||||
total: int = Field(description="Number of posts returned by the backend.")
|
||||
posts: list[PostSummary] = Field(
|
||||
default_factory=list,
|
||||
description="Posts created within the requested time window.",
|
||||
)
|
||||
|
||||
|
||||
CommentData.model_rebuild()
|
||||
|
||||
|
||||
class PostDetail(PostSummary):
|
||||
"""Detailed information for a single post, including comments."""
|
||||
|
||||
comments: list[CommentData] = Field(
|
||||
default_factory=list,
|
||||
description="Comments that belong to the post.",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
@field_validator("comments", mode="before")
|
||||
@classmethod
|
||||
def _ensure_comments_list(cls, value: Any) -> list[Any]:
|
||||
"""Treat ``None`` comments payloads as empty lists."""
|
||||
|
||||
if value is None:
|
||||
return []
|
||||
return value
|
||||
|
||||
|
||||
class NotificationData(BaseModel):
|
||||
"""Unread notification payload returned by the backend."""
|
||||
|
||||
id: Optional[int] = Field(default=None, description="Notification identifier.")
|
||||
type: Optional[str] = Field(default=None, description="Type of the notification.")
|
||||
post: Optional[PostSummary] = Field(
|
||||
default=None, description="Post associated with the notification if applicable."
|
||||
)
|
||||
comment: Optional[CommentData] = Field(
|
||||
default=None, description="Comment referenced by the notification when available."
|
||||
)
|
||||
parent_comment: Optional[CommentData] = Field(
|
||||
default=None,
|
||||
alias="parentComment",
|
||||
description="Parent comment for nested replies, when present.",
|
||||
)
|
||||
from_user: Optional[AuthorInfo] = Field(
|
||||
default=None,
|
||||
alias="fromUser",
|
||||
description="User who triggered the notification.",
|
||||
)
|
||||
reaction_type: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="reactionType",
|
||||
description="Reaction type for reaction-based notifications.",
|
||||
)
|
||||
content: Optional[str] = Field(
|
||||
default=None, description="Additional content or message for the notification."
|
||||
)
|
||||
approved: Optional[bool] = Field(
|
||||
default=None, description="Approval status for moderation notifications."
|
||||
)
|
||||
read: Optional[bool] = Field(default=None, description="Whether the notification is read.")
|
||||
created_at: Optional[datetime] = Field(
|
||||
default=None,
|
||||
alias="createdAt",
|
||||
description="Timestamp when the notification was created.",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
|
||||
class UnreadNotificationsResponse(BaseModel):
|
||||
"""Structured response for unread notification queries."""
|
||||
|
||||
page: int = Field(description="Requested page index for the unread notifications.")
|
||||
size: int = Field(description="Requested page size for the unread notifications.")
|
||||
total: int = Field(description="Number of unread notifications returned in this page.")
|
||||
notifications: list[NotificationData] = Field(
|
||||
default_factory=list,
|
||||
description="Unread notifications returned by the backend.",
|
||||
)
|
||||
|
||||
|
||||
class NotificationCleanupResult(BaseModel):
|
||||
"""Structured response returned after marking notifications as read."""
|
||||
|
||||
processed_ids: list[int] = Field(
|
||||
default_factory=list,
|
||||
description="Identifiers that were marked as read in the backend.",
|
||||
)
|
||||
total_marked: int = Field(
|
||||
description="Total number of notifications successfully marked as read.",
|
||||
)
|
||||
345
mcp/src/openisle_mcp/search_client.py
Normal file
345
mcp/src/openisle_mcp/search_client.py
Normal file
@@ -0,0 +1,345 @@
|
||||
"""HTTP client helpers for talking to the OpenIsle backend endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchClient:
|
||||
"""Client for calling the OpenIsle HTTP APIs used by the MCP server."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
base_url: str,
|
||||
*,
|
||||
timeout: float = 10.0,
|
||||
access_token: str | None = None,
|
||||
) -> None:
|
||||
self._base_url = base_url.rstrip("/")
|
||||
self._timeout = timeout
|
||||
self._client: httpx.AsyncClient | None = None
|
||||
self._access_token = self._sanitize_token(access_token)
|
||||
|
||||
def _get_client(self) -> httpx.AsyncClient:
|
||||
if self._client is None:
|
||||
logger.debug(
|
||||
"Creating httpx.AsyncClient for base URL %s with timeout %.2fs",
|
||||
self._base_url,
|
||||
self._timeout,
|
||||
)
|
||||
self._client = httpx.AsyncClient(
|
||||
base_url=self._base_url,
|
||||
timeout=self._timeout,
|
||||
)
|
||||
return self._client
|
||||
|
||||
@staticmethod
|
||||
def _sanitize_token(token: str | None) -> str | None:
|
||||
if token is None:
|
||||
return None
|
||||
stripped = token.strip()
|
||||
return stripped or None
|
||||
|
||||
def update_access_token(self, token: str | None) -> None:
|
||||
"""Update the default access token used for authenticated requests."""
|
||||
|
||||
self._access_token = self._sanitize_token(token)
|
||||
if self._access_token:
|
||||
logger.debug("Configured default access token for SearchClient requests.")
|
||||
else:
|
||||
logger.debug("Cleared default access token for SearchClient requests.")
|
||||
|
||||
def _resolve_token(self, token: str | None) -> str | None:
|
||||
candidate = self._sanitize_token(token)
|
||||
if candidate is not None:
|
||||
return candidate
|
||||
return self._access_token
|
||||
|
||||
def _require_token(self, token: str | None) -> str:
|
||||
resolved = self._resolve_token(token)
|
||||
if resolved is None:
|
||||
raise ValueError(
|
||||
"Authenticated request requires an access token. Provide a Bearer token "
|
||||
"via the MCP Authorization header or configure a default token for the server."
|
||||
)
|
||||
return resolved
|
||||
|
||||
def _build_headers(
|
||||
self,
|
||||
*,
|
||||
token: str | None = None,
|
||||
accept: str = "application/json",
|
||||
include_json: bool = False,
|
||||
) -> dict[str, str]:
|
||||
headers: dict[str, str] = {"Accept": accept}
|
||||
resolved = self._resolve_token(token)
|
||||
if resolved:
|
||||
headers["Authorization"] = f"Bearer {resolved}"
|
||||
if include_json:
|
||||
headers["Content-Type"] = "application/json"
|
||||
return headers
|
||||
|
||||
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
|
||||
"""Call the global search endpoint and return the parsed JSON payload."""
|
||||
|
||||
client = self._get_client()
|
||||
logger.debug("Calling global search with keyword=%s", keyword)
|
||||
response = await client.get(
|
||||
"/api/search/global",
|
||||
params={"keyword": keyword},
|
||||
headers=self._build_headers(),
|
||||
)
|
||||
response.raise_for_status()
|
||||
payload = response.json()
|
||||
if not isinstance(payload, list):
|
||||
formatted = json.dumps(payload, ensure_ascii=False)[:200]
|
||||
raise ValueError(f"Unexpected response format from search endpoint: {formatted}")
|
||||
logger.info(
|
||||
"Global search returned %d results for keyword '%s'",
|
||||
len(payload),
|
||||
keyword,
|
||||
)
|
||||
return [self._ensure_dict(entry) for entry in payload]
|
||||
|
||||
async def reply_to_comment(
|
||||
self,
|
||||
comment_id: int,
|
||||
content: str,
|
||||
*,
|
||||
token: str | None = None,
|
||||
captcha: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Reply to an existing comment and return the created reply."""
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
headers = self._build_headers(token=resolved_token, include_json=True)
|
||||
payload: dict[str, Any] = {"content": content}
|
||||
if captcha is not None:
|
||||
stripped_captcha = captcha.strip()
|
||||
if stripped_captcha:
|
||||
payload["captcha"] = stripped_captcha
|
||||
|
||||
logger.debug(
|
||||
"Posting reply to comment_id=%s (captcha=%s)",
|
||||
comment_id,
|
||||
bool(captcha),
|
||||
)
|
||||
response = await client.post(
|
||||
f"/api/comments/{comment_id}/replies",
|
||||
json=payload,
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
body = self._ensure_dict(response.json())
|
||||
logger.info("Reply to comment_id=%s succeeded with id=%s", comment_id, body.get("id"))
|
||||
return body
|
||||
|
||||
async def reply_to_post(
|
||||
self,
|
||||
post_id: int,
|
||||
content: str,
|
||||
*,
|
||||
token: str | None = None,
|
||||
captcha: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Create a comment on a post and return the backend payload."""
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
headers = self._build_headers(token=resolved_token, include_json=True)
|
||||
payload: dict[str, Any] = {"content": content}
|
||||
if captcha is not None:
|
||||
stripped_captcha = captcha.strip()
|
||||
if stripped_captcha:
|
||||
payload["captcha"] = stripped_captcha
|
||||
|
||||
logger.debug(
|
||||
"Posting comment to post_id=%s (captcha=%s)",
|
||||
post_id,
|
||||
bool(captcha),
|
||||
)
|
||||
response = await client.post(
|
||||
f"/api/posts/{post_id}/comments",
|
||||
json=payload,
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
body = self._ensure_dict(response.json())
|
||||
logger.info("Reply to post_id=%s succeeded with id=%s", post_id, body.get("id"))
|
||||
return body
|
||||
|
||||
async def create_post(
|
||||
self,
|
||||
payload: dict[str, Any],
|
||||
*,
|
||||
token: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Create a new post and return the detailed backend payload."""
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
headers = self._build_headers(token=resolved_token, include_json=True)
|
||||
|
||||
logger.debug(
|
||||
"Creating post with category_id=%s and %d tag(s)",
|
||||
payload.get("categoryId"),
|
||||
len(payload.get("tagIds", []) if isinstance(payload.get("tagIds"), list) else []),
|
||||
)
|
||||
response = await client.post(
|
||||
"/api/posts",
|
||||
json=payload,
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
body = self._ensure_dict(response.json())
|
||||
logger.info("Post creation succeeded with id=%s", body.get("id"))
|
||||
return body
|
||||
|
||||
async def recent_posts(self, minutes: int) -> list[dict[str, Any]]:
|
||||
"""Return posts created within the given timeframe."""
|
||||
|
||||
client = self._get_client()
|
||||
logger.debug(
|
||||
"Fetching recent posts within last %s minutes",
|
||||
minutes,
|
||||
)
|
||||
response = await client.get(
|
||||
"/api/posts/recent",
|
||||
params={"minutes": minutes},
|
||||
headers=self._build_headers(),
|
||||
)
|
||||
response.raise_for_status()
|
||||
payload = response.json()
|
||||
if not isinstance(payload, list):
|
||||
formatted = json.dumps(payload, ensure_ascii=False)[:200]
|
||||
raise ValueError(
|
||||
f"Unexpected response format from recent posts endpoint: {formatted}"
|
||||
)
|
||||
logger.info(
|
||||
"Fetched %d recent posts for window=%s minutes",
|
||||
len(payload),
|
||||
minutes,
|
||||
)
|
||||
return [self._ensure_dict(entry) for entry in payload]
|
||||
|
||||
async def get_post(self, post_id: int, token: str | None = None) -> dict[str, Any]:
|
||||
"""Retrieve the detailed payload for a single post."""
|
||||
|
||||
client = self._get_client()
|
||||
headers = self._build_headers(token=token)
|
||||
logger.debug("Fetching post details for post_id=%s", post_id)
|
||||
response = await client.get(f"/api/posts/{post_id}", headers=headers)
|
||||
response.raise_for_status()
|
||||
body = self._ensure_dict(response.json())
|
||||
logger.info(
|
||||
"Retrieved post_id=%s successfully with %d top-level comments",
|
||||
post_id,
|
||||
len(body.get("comments", []) if isinstance(body.get("comments"), list) else []),
|
||||
)
|
||||
return body
|
||||
|
||||
async def list_unread_notifications(
|
||||
self,
|
||||
*,
|
||||
page: int = 0,
|
||||
size: int = 30,
|
||||
token: str | None = None,
|
||||
) -> list[dict[str, Any]]:
|
||||
"""Return unread notifications for the authenticated user."""
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
logger.debug(
|
||||
"Fetching unread notifications with page=%s, size=%s",
|
||||
page,
|
||||
size,
|
||||
)
|
||||
response = await client.get(
|
||||
"/api/notifications/unread",
|
||||
params={"page": page, "size": size},
|
||||
headers=self._build_headers(token=resolved_token),
|
||||
)
|
||||
response.raise_for_status()
|
||||
payload = response.json()
|
||||
if not isinstance(payload, list):
|
||||
formatted = json.dumps(payload, ensure_ascii=False)[:200]
|
||||
raise ValueError(
|
||||
"Unexpected response format from unread notifications endpoint: "
|
||||
f"{formatted}"
|
||||
)
|
||||
logger.info(
|
||||
"Fetched %d unread notifications (page=%s, size=%s)",
|
||||
len(payload),
|
||||
page,
|
||||
size,
|
||||
)
|
||||
return [self._ensure_dict(entry) for entry in payload]
|
||||
|
||||
async def mark_notifications_read(
|
||||
self,
|
||||
ids: list[int],
|
||||
*,
|
||||
token: str | None = None,
|
||||
) -> None:
|
||||
"""Mark the provided notifications as read for the authenticated user."""
|
||||
|
||||
if not ids:
|
||||
raise ValueError(
|
||||
"At least one notification identifier must be provided to mark as read."
|
||||
)
|
||||
|
||||
sanitized_ids: list[int] = []
|
||||
for value in ids:
|
||||
if isinstance(value, bool):
|
||||
raise ValueError("Notification identifiers must be integers, not booleans.")
|
||||
try:
|
||||
converted = int(value)
|
||||
except (TypeError, ValueError) as exc: # pragma: no cover - defensive
|
||||
raise ValueError(
|
||||
"Notification identifiers must be integers."
|
||||
) from exc
|
||||
if converted <= 0:
|
||||
raise ValueError(
|
||||
"Notification identifiers must be positive integers."
|
||||
)
|
||||
sanitized_ids.append(converted)
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
logger.debug(
|
||||
"Marking %d notifications as read: ids=%s",
|
||||
len(sanitized_ids),
|
||||
sanitized_ids,
|
||||
)
|
||||
response = await client.post(
|
||||
"/api/notifications/read",
|
||||
json={"ids": sanitized_ids},
|
||||
headers=self._build_headers(token=resolved_token, include_json=True),
|
||||
)
|
||||
response.raise_for_status()
|
||||
logger.info(
|
||||
"Successfully marked %d notifications as read.",
|
||||
len(sanitized_ids),
|
||||
)
|
||||
|
||||
async def aclose(self) -> None:
|
||||
"""Dispose of the underlying HTTP client."""
|
||||
|
||||
if self._client is not None:
|
||||
await self._client.aclose()
|
||||
self._client = None
|
||||
logger.debug("Closed httpx.AsyncClient for SearchClient.")
|
||||
|
||||
@staticmethod
|
||||
def _ensure_dict(entry: Any) -> dict[str, Any]:
|
||||
if not isinstance(entry, dict):
|
||||
raise ValueError(f"Expected JSON object, got: {type(entry)!r}")
|
||||
return entry
|
||||
File diff suppressed because it is too large
Load Diff
@@ -100,10 +100,28 @@ server {
|
||||
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||
}
|
||||
|
||||
|
||||
# ---------- WEBSOCKET GATEWAY TO :8084 ----------
location ^~ /websocket/ {
    # Single upstream target; the duplicate proxy_pass directive was removed
    # (nginx rejects a repeated proxy_pass in one location at config load).
    proxy_pass http://127.0.0.1:8084/;
    proxy_http_version 1.1;

    # Headers required for the WebSocket upgrade handshake.
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $connection_upgrade;

    # Forward client identity to the upstream.
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_set_header X-Forwarded-Host $host;

    # Long-lived connections; disable buffering/caching for realtime frames.
    proxy_read_timeout 300s;
    proxy_send_timeout 300s;
    proxy_buffering off;
    proxy_cache off;
    add_header Cache-Control "no-store" always;
}
|
||||
|
||||
location /mcp {
|
||||
proxy_pass http://127.0.0.1:8085;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
|
||||
@@ -8,11 +8,8 @@ server {
|
||||
listen 443 ssl;
|
||||
server_name staging.open-isle.com www.staging.open-isle.com;
|
||||
|
||||
|
||||
ssl_certificate /etc/letsencrypt/live/staging.open-isle.com/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/staging.open-isle.com/privkey.pem;
|
||||
# ssl_certificate /etc/letsencrypt/live/open-isle.com/fullchain.pem;
|
||||
# ssl_certificate_key /etc/letsencrypt/live/open-isle.com/privkey.pem;
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
@@ -40,59 +37,13 @@ server {
|
||||
add_header X-Upstream $upstream_addr always;
|
||||
}
|
||||
|
||||
# 1) Raw WebSocket endpoint.
location ^~ /api/ws {
    proxy_pass http://127.0.0.1:8081; # No trailing slash: keep the original URI intact.
    proxy_http_version 1.1;

    # Headers required for the WebSocket upgrade handshake.
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $connection_upgrade;

    # Forward the same headers as /api/ so /api/ws behaves consistently.
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_set_header X-Forwarded-Host $host;

    proxy_read_timeout 300s;
    proxy_send_timeout 300s;
    proxy_buffering off;
    proxy_cache off;
}
|
||||
|
||||
# 2) SockJS (covers /info, /iframe.html, /.../websocket, etc.)
location ^~ /api/sockjs {
    proxy_pass http://127.0.0.1:8081;
    proxy_http_version 1.1;

    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $connection_upgrade;

    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_set_header X-Forwarded-Host $host;

    proxy_read_timeout 300s;
    proxy_send_timeout 300s;
    proxy_buffering off;
    proxy_cache off;

    # For a same-origin iframe fallback, enable one of the two lines below
    # (or delegate to Spring Security's sameOrigin setting).
    # proxy_hide_header X-Frame-Options;
    # add_header X-Frame-Options "SAMEORIGIN" always;
}
|
||||
|
||||
# ---------- API ----------
|
||||
location /api/ {
|
||||
proxy_pass http://127.0.0.1:8081/api/;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
|
||||
|
||||
|
||||
proxy_set_header Host $host;
|
||||
@@ -109,7 +60,6 @@ server {
|
||||
proxy_cache_bypass 1;
|
||||
}
|
||||
|
||||
# ---------- WEBSOCKET GATEWAY TO :8083 ----------
|
||||
location ^~ /websocket/ {
|
||||
proxy_pass http://127.0.0.1:8083/;
|
||||
proxy_http_version 1.1;
|
||||
@@ -130,4 +80,24 @@ server {
|
||||
add_header Cache-Control "no-store" always;
|
||||
}
|
||||
|
||||
}
|
||||
# MCP endpoint proxied to the local MCP server on :8086.
location /mcp {
    proxy_pass http://127.0.0.1:8086;
    proxy_http_version 1.1;

    # Allow WebSocket/streaming upgrades on the MCP endpoint.
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $connection_upgrade;

    # Forward client identity to the upstream.
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_set_header X-Forwarded-Host $host;

    # Long-lived connections; no buffering/caching for streamed responses.
    proxy_read_timeout 300s;
    proxy_send_timeout 300s;
    proxy_buffering off;
    proxy_cache off;
    add_header Cache-Control "no-store" always;
}
|
||||
|
||||
}
|
||||
Reference in New Issue
Block a user