Compare commits


1 Commit

Author SHA1 Message Date
Tim ab91ec2489 feat: add MCP search service 2025-10-25 22:05:25 +08:00
30 changed files with 345 additions and 2257 deletions

View File

@@ -2,11 +2,15 @@
SERVER_PORT=8080
FRONTEND_PORT=3000
WEBSOCKET_PORT=8082
OPENISLE_MCP_PORT=8085
MCP_PORT=9090
MYSQL_PORT=3306
REDIS_PORT=6379
RABBITMQ_PORT=5672
RABBITMQ_MANAGEMENT_PORT=15672
MCP_HOST=0.0.0.0
MCP_BACKEND_BASE_URL=http://springboot:8080
MCP_CONNECT_TIMEOUT=5
MCP_READ_TIMEOUT=10
# === OpenSearch Configuration ===
OPENSEARCH_PORT=9200
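For reference, a minimal sketch of how the MCP_* values above are resolved on the Python side (it mirrors the ENV_MAPPING and defaults introduced in mcp/src/openisle_mcp/config.py later in this diff):

```python
# Resolve the MCP_* variables with the same defaults the service uses.
import os

backend_base_url = os.getenv("MCP_BACKEND_BASE_URL", "http://springboot:8080")
host = os.getenv("MCP_HOST", "0.0.0.0")
port = int(os.getenv("MCP_PORT", "9090"))
connect_timeout = float(os.getenv("MCP_CONNECT_TIMEOUT", "5"))
read_timeout = float(os.getenv("MCP_READ_TIMEOUT", "10"))

print(f"MCP service on {host}:{port}, backend at {backend_base_url}")
```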

View File

@@ -1,29 +0,0 @@
name: Coffee Bot
on:
schedule:
- cron: "0 1 * * *"
workflow_dispatch:
jobs:
run-coffee-bot:
environment: Bots
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
cache: "npm"
- name: Install dependencies
run: npm install --no-save @openai/agents tsx typescript
- name: Run coffee bot
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
OPENISLE_TOKEN: ${{ secrets.OPENISLE_TOKEN }}
run: npx tsx bots/instance/coffee_bot.ts

View File

@@ -1,29 +0,0 @@
name: Reply Bots
on:
schedule:
- cron: "*/30 * * * *"
workflow_dispatch:
jobs:
run-reply-bot:
environment: Bots
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
cache: "npm"
- name: Install dependencies
run: npm install --no-save @openai/agents tsx typescript
- name: Run reply bot
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
OPENISLE_TOKEN: ${{ secrets.OPENISLE_TOKEN }}
run: npx tsx bots/instance/reply_bot.ts

View File

@@ -1,13 +1,11 @@
package com.openisle.controller;
import com.openisle.dto.CommentContextDto;
import com.openisle.dto.CommentDto;
import com.openisle.dto.CommentRequest;
import com.openisle.dto.PostChangeLogDto;
import com.openisle.dto.TimelineItemDto;
import com.openisle.mapper.CommentMapper;
import com.openisle.mapper.PostChangeLogMapper;
import com.openisle.mapper.PostMapper;
import com.openisle.model.Comment;
import com.openisle.model.CommentSort;
import com.openisle.service.*;
@@ -42,7 +40,6 @@ public class CommentController {
private final PointService pointService;
private final PostChangeLogService changeLogService;
private final PostChangeLogMapper postChangeLogMapper;
private final PostMapper postMapper;
@Value("${app.captcha.enabled:false}")
private boolean captchaEnabled;
@@ -187,37 +184,6 @@ public class CommentController {
return itemDtoList;
}
@GetMapping("/comments/{commentId}/context")
@Operation(
summary = "Comment context",
description = "Get a comment along with its previous comments and related post"
)
@ApiResponse(
responseCode = "200",
description = "Comment context",
content = @Content(schema = @Schema(implementation = CommentContextDto.class))
)
public ResponseEntity<CommentContextDto> getCommentContext(@PathVariable Long commentId) {
log.debug("getCommentContext called for comment {}", commentId);
Comment comment = commentService.getComment(commentId);
CommentContextDto dto = new CommentContextDto();
dto.setPost(postMapper.toSummaryDto(comment.getPost()));
dto.setTargetComment(commentMapper.toDtoWithReplies(comment));
dto.setPreviousComments(
commentService
.getCommentsBefore(comment)
.stream()
.map(commentMapper::toDtoWithReplies)
.collect(Collectors.toList())
);
log.debug(
"getCommentContext returning {} previous comments for comment {}",
dto.getPreviousComments().size(),
commentId
);
return ResponseEntity.ok(dto);
}
@DeleteMapping("/comments/{id}")
@Operation(summary = "Delete comment", description = "Delete a comment")
@ApiResponse(responseCode = "200", description = "Deleted")

View File

@@ -224,26 +224,6 @@ public class PostController {
.collect(Collectors.toList());
}
@GetMapping("/recent")
@Operation(
summary = "Recent posts",
description = "List posts created within the specified number of minutes"
)
@ApiResponse(
responseCode = "200",
description = "Recent posts",
content = @Content(
array = @ArraySchema(schema = @Schema(implementation = PostSummaryDto.class))
)
)
public List<PostSummaryDto> recentPosts(@RequestParam("minutes") int minutes) {
return postService
.listRecentPosts(minutes)
.stream()
.map(postMapper::toSummaryDto)
.collect(Collectors.toList());
}
@GetMapping("/ranking")
@Operation(summary = "Ranking posts", description = "List posts by view rankings")
@ApiResponse(

View File

@@ -1,15 +0,0 @@
package com.openisle.dto;
import java.util.List;
import lombok.Data;
/**
* DTO representing the context of a comment including its post and previous comments.
*/
@Data
public class CommentContextDto {
private PostSummaryDto post;
private CommentDto targetComment;
private List<CommentDto> previousComments;
}

View File

@@ -3,7 +3,6 @@ package com.openisle.repository;
import com.openisle.model.Comment;
import com.openisle.model.Post;
import com.openisle.model.User;
import java.time.LocalDateTime;
import java.util.List;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
@@ -11,10 +10,6 @@ import org.springframework.data.jpa.repository.JpaRepository;
public interface CommentRepository extends JpaRepository<Comment, Long> {
List<Comment> findByPostAndParentIsNullOrderByCreatedAtAsc(Post post);
List<Comment> findByParentOrderByCreatedAtAsc(Comment parent);
List<Comment> findByPostAndCreatedAtLessThanOrderByCreatedAtAsc(
Post post,
LocalDateTime createdAt
);
List<Comment> findByAuthorOrderByCreatedAtDesc(User author, Pageable pageable);
List<Comment> findByContentContainingIgnoreCase(String keyword);

View File

@@ -19,10 +19,6 @@ public interface PostRepository extends JpaRepository<Post, Long> {
List<Post> findByStatusOrderByCreatedAtDesc(PostStatus status, Pageable pageable);
List<Post> findByStatusOrderByViewsDesc(PostStatus status);
List<Post> findByStatusOrderByViewsDesc(PostStatus status, Pageable pageable);
List<Post> findByStatusAndCreatedAtGreaterThanEqualOrderByCreatedAtDesc(
PostStatus status,
LocalDateTime createdAt
);
List<Post> findByAuthorAndStatusOrderByCreatedAtDesc(
User author,
PostStatus status,

View File

@@ -266,27 +266,6 @@ public class CommentService {
return replies;
}
public Comment getComment(Long commentId) {
log.debug("getComment called for id {}", commentId);
return commentRepository
.findById(commentId)
.orElseThrow(() -> new com.openisle.exception.NotFoundException("Comment not found"));
}
public List<Comment> getCommentsBefore(Comment comment) {
log.debug("getCommentsBefore called for comment {}", comment.getId());
List<Comment> comments = commentRepository.findByPostAndCreatedAtLessThanOrderByCreatedAtAsc(
comment.getPost(),
comment.getCreatedAt()
);
log.debug(
"getCommentsBefore returning {} comments for comment {}",
comments.size(),
comment.getId()
);
return comments;
}
public List<Comment> getRecentCommentsByUser(String username, int limit) {
log.debug("getRecentCommentsByUser called for user {} with limit {}", username, limit);
User user = userRepository

View File

@@ -770,18 +770,6 @@ public class PostService {
return listPostsByCategories(null, null, null);
}
public List<Post> listRecentPosts(int minutes) {
if (minutes <= 0) {
throw new IllegalArgumentException("Minutes must be positive");
}
LocalDateTime since = LocalDateTime.now().minusMinutes(minutes);
List<Post> posts = postRepository.findByStatusAndCreatedAtGreaterThanEqualOrderByCreatedAtDesc(
PostStatus.PUBLISHED,
since
);
return sortByPinnedAndCreated(posts);
}
public List<Post> listPostsByViews(Integer page, Integer pageSize) {
return listPostsByViews(null, null, page, pageSize);
}

View File

@@ -1,150 +0,0 @@
import { Agent, Runner, hostedMcpTool, withTrace } from "@openai/agents";
export type WorkflowInput = { input_as_text: string };
export abstract class BotFather {
protected readonly allowedMcpTools = [
"search",
"create_post",
"reply_to_post",
"reply_to_comment",
"recent_posts",
"get_post",
"list_unread_messages",
"mark_notifications_read",
"create_post",
];
protected readonly openisleToken = (process.env.OPENISLE_TOKEN ?? "").trim();
protected readonly mcp = this.createHostedMcpTool();
protected readonly agent: Agent;
constructor(protected readonly name: string) {
console.log(`${this.name} starting...`);
console.log(
"🛠️ Configured Hosted MCP tools:",
this.allowedMcpTools.join(", ")
);
console.log(
this.openisleToken
? "🔑 OPENISLE_TOKEN detected in environment; it will be attached to MCP requests."
: "🔓 OPENISLE_TOKEN not set; authenticated MCP tools may be unavailable."
);
this.agent = new Agent({
name: this.name,
instructions: this.buildInstructions(),
tools: [this.mcp],
model: "gpt-4o",
modelSettings: {
temperature: 0.7,
topP: 1,
maxTokens: 2048,
toolChoice: "auto",
store: true,
},
});
}
protected buildInstructions(): string {
const instructions = [
...this.getBaseInstructions(),
...this.getAdditionalInstructions(),
].filter(Boolean);
return instructions.join("\n");
}
protected getBaseInstructions(): string[] {
return [
"You are a helpful assistant for https://www.open-isle.com.",
"Finish tasks end-to-end before replying. If multiple MCP tools are needed, call them sequentially until the task is truly done.",
"When presenting the result, reply in Chinese with a concise summary and include any important URLs or IDs.",
"After finishing replies, call mark_notifications_read with all processed notification IDs to keep the inbox clean.",
];
}
private createHostedMcpTool() {
const token = this.openisleToken;
const authConfig = token
? {
headers: {
Authorization: `Bearer ${token}`,
},
}
: {};
return hostedMcpTool({
serverLabel: "openisle_mcp",
serverUrl: "https://www.open-isle.com/mcp",
allowedTools: this.allowedMcpTools,
requireApproval: "never",
...authConfig,
});
}
protected getAdditionalInstructions(): string[] {
return [];
}
protected createRunner(): Runner {
return new Runner({
workflowName: this.name,
traceMetadata: {
__trace_source__: "agent-builder",
workflow_id: "wf_69003cbd47e08190928745d3c806c0b50d1a01cfae052be8",
},
});
}
public async runWorkflow(workflow: WorkflowInput) {
if (!process.env.OPENAI_API_KEY) {
throw new Error("Missing OPENAI_API_KEY");
}
const runner = this.createRunner();
return await withTrace(`${this.name} run`, async () => {
const preview = workflow.input_as_text.trim();
console.log(
"📝 Received workflow input (preview):",
preview.length > 200 ? `${preview.slice(0, 200)}` : preview
);
console.log("🚦 Starting agent run with maxTurns=16...");
const result = await runner.run(this.agent, workflow.input_as_text, {
maxTurns: 16,
});
console.log("📬 Agent run completed. Result keys:", Object.keys(result));
if (!result.finalOutput) {
throw new Error("Agent result is undefined (no final output).");
}
const openisleBotResult = { output_text: String(result.finalOutput) };
console.log(
"🤖 Agent result (length=%d):\n%s",
openisleBotResult.output_text.length,
openisleBotResult.output_text
);
return openisleBotResult;
});
}
protected abstract getCliQuery(): string;
public async runCli(): Promise<void> {
try {
const query = this.getCliQuery();
console.log("🔍 Running workflow...");
await this.runWorkflow({ input_as_text: query });
process.exit(0);
} catch (err: any) {
console.error("❌ Agent failed:", err?.stack || err);
process.exit(1);
}
}
}

View File

@@ -1,63 +0,0 @@
import { BotFather, WorkflowInput } from "../bot_father";
const WEEKDAY_NAMES = ["日", "一", "二", "三", "四", "五", "六"] as const;
class CoffeeBot extends BotFather {
constructor() {
super("Coffee Bot");
}
protected override getAdditionalInstructions(): string[] {
return [
"You are responsible for 发布每日抽奖早安贴。",
"创建帖子时,确保标题、奖品信息、开奖时间以及领奖方式完全符合 CLI 查询提供的细节。",
"正文需亲切友好,简洁明了,鼓励社区成员互动。",
"开奖说明需明确告知中奖者需私聊站长 @nagisa 领取奖励。",
"确保只发布一个帖子,避免重复调用 create_post。",
];
}
protected override getCliQuery(): string {
const now = new Date();
const beijingNow = new Date(
now.toLocaleString("en-US", { timeZone: "Asia/Shanghai" })
);
const weekday = WEEKDAY_NAMES[beijingNow.getDay()];
const drawTime = new Date(beijingNow);
drawTime.setHours(15, 0, 0, 0);
const drawTimeText = drawTime
.toLocaleTimeString("zh-CN", {
hour: "2-digit",
minute: "2-digit",
hour12: false,
timeZone: "Asia/Shanghai",
})
.replace(/^24:/, "00:");
return `
请立即在 https://www.open-isle.com 使用 create_post 发表一篇全新帖子,遵循以下要求:
1. 标题固定为「大家星期${weekday}早安--抽一杯咖啡」。
2. 正文包含:
- 亲切的早安问候;
- 明确奖品写作“Coffee x 1”
- 奖品图片链接：https://openisle-1307107697.cos.accelerate.myqcloud.com/dynamic_assert/0d6a9b33e9ca4fe5a90540187d3f9ecb.png
- 公布开奖时间为今天下午 15:00（北京时间，写成 ${drawTimeText}）
- 标注“领奖请私聊站长 @nagisa”
- 鼓励大家留言互动。
3. 帖子语言使用简体中文,格式可用 Markdown，使关键信息醒目。
4. 完成后只输出“已发布咖啡抽奖贴”,不额外生成总结。
`.trim();
}
}
const coffeeBot = new CoffeeBot();
export const runWorkflow = async (workflow: WorkflowInput) => {
return coffeeBot.runWorkflow(workflow);
};
if (require.main === module) {
coffeeBot.runCli();
}

View File

@@ -1,39 +0,0 @@
// reply_bot.ts
import { BotFather, WorkflowInput } from "../bot_father";
class ReplyBot extends BotFather {
constructor() {
super("OpenIsle Bot");
}
protected override getAdditionalInstructions(): string[] {
return [
"You are a helpful and cute assistant for https://www.open-isle.com. Keep the lovable tone with plentiful kawaii kaomoji (颜表情) such as (๑˃ᴗ˂)ﻭ, (•̀ω•́)✧, (。•ᴗ-)_♡, (⁎⁍̴̛ᴗ⁍̴̛⁎), etc., while staying professional and informative.",
"OpenIsle 是一个由 Spring Boot + Vue 3 打造的开源社区平台提供注册登录、OAuth 登录Google/GitHub/Discord/Twitter、帖子与评论互动、标签分类、草稿、统计分析、通知消息、全局搜索、Markdown 支持、图片上传(默认腾讯云 COS、浏览器推送、DiceBear 头像等功能,旨在帮助团队快速搭建属于自己的技术社区。",
"回复时请主动结合上述站点背景,为用户提供有洞察力、可执行的建议或答案,并在需要时引用官网 https://www.open-isle.com、GitHub 仓库 https://github.com/nagisa77/OpenIsle 或相关文档链接,避免空泛的安慰或套话。",
"When presenting the result, reply in Chinese with a concise yet content-rich summary filled with kaomoji,并清晰列出关键结论、操作步骤、重要 URL 或 ID确保用户能直接采取行动。",
];
}
protected override getCliQuery(): string {
return `
【AUTO】无需确认，自动处理所有未读的提及与评论：
1）调用 list_unread_messages
2）依次处理每条“提及/评论”:如需上下文则使用 get_post 获取,生成简明中文回复;如有 commentId 则用 reply_to_comment，否则用 reply_to_post
3）跳过关注和系统事件
4）保证幂等性：如该贴最后一条是你自己发的回复则跳过
5）调用 mark_notifications_read，传入本次已处理的通知 ID 清理已读;
6）最多只处理最新10条；结束时仅输出简要摘要（包含URL或ID）
`.trim();
}
}
const replyBot = new ReplyBot();
export const runWorkflow = async (workflow: WorkflowInput) => {
return replyBot.runWorkflow(workflow);
};
if (require.main === module) {
replyBot.runCli();
}

View File

@@ -40,12 +40,12 @@ echo "👉 Build images ..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=production \
frontend_service mcp
frontend_service mcp-service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot frontend_service mcp
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -39,12 +39,12 @@ echo "👉 Build images (staging)..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=staging \
frontend_service mcp
frontend_service mcp-service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot frontend_service mcp
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -178,32 +178,38 @@ services:
- dev
- prod
mcp:
mcp-service:
build:
context: ..
dockerfile: docker/mcp.Dockerfile
dockerfile: mcp/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
env_file:
- ${ENV_FILE:-../.env}
environment:
OPENISLE_MCP_BACKEND_BASE_URL: http://springboot:${SERVER_PORT:-8080}
OPENISLE_MCP_HOST: 0.0.0.0
OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8085}
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-streamable-http}
OPENISLE_MCP_REQUEST_TIMEOUT: ${OPENISLE_MCP_REQUEST_TIMEOUT:-10.0}
MCP_HOST: ${MCP_HOST:-0.0.0.0}
MCP_PORT: ${MCP_PORT:-9090}
MCP_BACKEND_BASE_URL: ${MCP_BACKEND_BASE_URL:-http://springboot:8080}
MCP_CONNECT_TIMEOUT: ${MCP_CONNECT_TIMEOUT:-5}
MCP_READ_TIMEOUT: ${MCP_READ_TIMEOUT:-10}
ports:
- "${OPENISLE_MCP_PORT:-8085}:${OPENISLE_MCP_PORT:-8085}"
- "${MCP_PORT:-9090}:${MCP_PORT:-9090}"
depends_on:
springboot:
condition: service_started
condition: service_healthy
command: ["openisle-mcp"]
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://127.0.0.1:${MCP_PORT:-9090}/healthz || exit 1"]
interval: 10s
timeout: 5s
retries: 30
start_period: 20s
restart: unless-stopped
networks:
- openisle-network
profiles:
- dev
- dev_local_backend
- prod
websocket-service:
image: maven:3.9-eclipse-temurin-17
container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket
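A quick host-side check of the new mcp-service health endpoint (a sketch; it assumes the default MCP_PORT=9090 mapping shown above and uses httpx, which the service already depends on):

```python
# Probe the same endpoint the compose healthcheck curls.
import httpx

response = httpx.get("http://127.0.0.1:9090/healthz", timeout=5.0)
response.raise_for_status()
print("mcp-service healthy:", response.status_code)
```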

View File

@@ -1,21 +0,0 @@
FROM python:3.11-slim AS base
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
WORKDIR /app
COPY mcp/pyproject.toml mcp/README.md ./
COPY mcp/src ./src
RUN pip install --no-cache-dir --upgrade pip \
&& pip install --no-cache-dir .
ENV OPENISLE_MCP_HOST=0.0.0.0 \
OPENISLE_MCP_PORT=8085 \
OPENISLE_MCP_TRANSPORT=streamable-http
EXPOSE 8085
CMD ["openisle-mcp"]

mcp/Dockerfile (new file, +21 lines)
View File

@@ -0,0 +1,21 @@
FROM python:3.11-slim
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
WORKDIR /app
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl \
&& rm -rf /var/lib/apt/lists/*
COPY mcp/pyproject.toml ./pyproject.toml
COPY mcp/README.md ./README.md
COPY mcp/src ./src
RUN pip install --no-cache-dir --upgrade pip \
&& pip install --no-cache-dir .
EXPOSE 9090
CMD ["openisle-mcp"]

View File

@@ -1,42 +1,34 @@
# OpenIsle MCP Server
# OpenIsle MCP Service
This package provides a [Model Context Protocol](https://modelcontextprotocol.io) (MCP) server
that exposes OpenIsle's search capabilities as MCP tools. The initial release focuses on the
global search endpoint so the agent ecosystem can retrieve relevant posts, users, tags, and
other resources.
This package hosts a lightweight Python service that exposes OpenIsle search
capabilities through a Model Context Protocol (MCP) compatible HTTP interface.
It currently forwards search requests to the main Spring Boot backend and
returns the aggregated results. The service is intentionally simple so we can
iterate quickly and extend it with additional tools (for example, post
creation) in future updates.
## Configuration
The server is configured through environment variables (all prefixed with `OPENISLE_MCP_`):
| Variable | Default | Description |
| --- | --- | --- |
| `BACKEND_BASE_URL` | `http://springboot:8080` | Base URL of the OpenIsle backend. |
| `PORT` | `8085` | TCP port when running with the `streamable-http` transport. |
| `HOST` | `0.0.0.0` | Interface to bind when serving HTTP. |
| `TRANSPORT` | `streamable-http` | Transport to use (`stdio`, `sse`, or `streamable-http`). |
| `REQUEST_TIMEOUT` | `10.0` | Timeout (seconds) for backend HTTP requests. |
## Running locally
## Local development
```bash
pip install .
OPENISLE_MCP_BACKEND_BASE_URL="http://localhost:8080" openisle-mcp
pip install -e ./mcp
openisle-mcp
```
By default the server listens on port `8085` and serves MCP over Streamable HTTP.
By default the server listens on port `9090` and expects the Spring Boot backend
at `http://localhost:8080`. Configure the behaviour with the following
environment variables:
## Available tools
- `MCP_PORT`: HTTP port the MCP service should listen on (default: `9090`).
- `MCP_HOST`: Bind host for the HTTP server (default: `0.0.0.0`).
- `MCP_BACKEND_BASE_URL`: Base URL of the Spring Boot backend that provides the
search endpoints (default: `http://springboot:8080`).
- `MCP_CONNECT_TIMEOUT`: Connection timeout (seconds) when calling the backend
(default: `5`).
- `MCP_READ_TIMEOUT`: Read timeout (seconds) when calling the backend (default:
`10`).
| Tool | Description |
| --- | --- |
| `search` | Perform a global search against the OpenIsle backend. |
| `create_post` | Publish a new post using a JWT token. |
| `reply_to_post` | Create a new comment on a post using a JWT token. |
| `reply_to_comment` | Reply to an existing comment using a JWT token. |
| `recent_posts` | Retrieve posts created within the last *N* minutes. |
The tools return structured data mirroring the backend DTOs, including highlighted snippets for
search results, the full comment payload for post replies and comment replies, and detailed
metadata for recent posts.
## Docker
The repository contains a Dockerfile that builds a slim Python image running the
service with `uvicorn`. The compose configuration wires the container into the
existing OpenIsle stack so that deployments automatically start the MCP service.
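A sketch of the local-development flow described above (variable names and defaults come from this README; setting the environment before calling main() works because settings are only read at start-up):

```python
import os

from openisle_mcp.__main__ import main

# Point the service at a locally running Spring Boot backend before start-up.
os.environ.setdefault("MCP_BACKEND_BASE_URL", "http://localhost:8080")
os.environ.setdefault("MCP_PORT", "9090")

main()  # starts uvicorn with settings resolved from the environment
```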

View File

@@ -1,27 +1,25 @@
[build-system]
requires = ["hatchling>=1.25"]
requires = ["hatchling>=1.21.0"]
build-backend = "hatchling.build"
[project]
name = "openisle-mcp"
version = "0.1.0"
description = "Model Context Protocol server exposing OpenIsle search capabilities."
description = "Model Context Protocol server exposing OpenIsle search capabilities"
readme = "README.md"
authors = [{ name = "OpenIsle", email = "engineering@openisle.example" }]
authors = [
{ name = "OpenIsle" }
]
requires-python = ">=3.11"
dependencies = [
"mcp>=1.19.0",
"httpx>=0.28,<0.29",
"pydantic>=2.12,<3",
"pydantic-settings>=2.11,<3"
"fastapi>=0.111.0,<1.0.0",
"uvicorn[standard]>=0.29.0,<0.31.0",
"httpx>=0.27.0,<0.28.0",
"pydantic>=2.7.0,<3.0.0"
]
[project.scripts]
openisle-mcp = "openisle_mcp.server:main"
openisle-mcp = "openisle_mcp.__main__:main"
[tool.hatch.build]
[tool.hatch.build.targets.wheel]
packages = ["src/openisle_mcp"]
[tool.ruff]
line-length = 100

View File

@@ -1,6 +1,6 @@
"""OpenIsle MCP server package."""
"""OpenIsle MCP service package."""
from .config import Settings, get_settings
from .server import create_app
__all__ = ["Settings", "get_settings"]
__all__ = ["Settings", "get_settings", "create_app"]

View File

@@ -0,0 +1,24 @@
"""Entrypoint for running the MCP service with ``python -m``."""
from __future__ import annotations
import logging
import uvicorn
from .config import get_settings
def main() -> None:
settings = get_settings()
logging.basicConfig(level=logging.INFO)
uvicorn.run(
"openisle_mcp.server:create_app",
host=settings.host,
port=settings.port,
factory=True,
)
if __name__ == "__main__": # pragma: no cover
main()
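An equivalent programmatic start-up, as a sketch (it assumes create_app() takes no arguments, which the factory=True invocation above implies; the server.py diff itself is suppressed in this compare view):

```python
import uvicorn

from openisle_mcp.config import get_settings
from openisle_mcp.server import create_app

settings = get_settings()
app = create_app()  # build the application once instead of passing the factory string
uvicorn.run(app, host=settings.host, port=settings.port)
```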

View File

@@ -0,0 +1,44 @@
"""HTTP client helpers for talking to the Spring Boot backend."""
from __future__ import annotations
import logging
from typing import Any
import httpx
from .config import Settings
LOGGER = logging.getLogger(__name__)
class SearchClient:
"""Wrapper around :class:`httpx.AsyncClient` for search operations."""
def __init__(self, settings: Settings):
timeout = httpx.Timeout(
connect=settings.connect_timeout,
read=settings.read_timeout,
write=settings.read_timeout,
pool=None,
)
self._client = httpx.AsyncClient(
base_url=settings.normalized_backend_base_url,
timeout=timeout,
)
async def close(self) -> None:
await self._client.aclose()
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
LOGGER.debug("Performing global search for keyword '%s'", keyword)
response = await self._client.get("/api/search/global", params={"keyword": keyword})
response.raise_for_status()
payload = response.json()
if isinstance(payload, list):
return payload
LOGGER.warning("Unexpected payload type from backend: %s", type(payload))
return []
__all__ = ["SearchClient"]

View File

@@ -1,66 +1,71 @@
"""Application configuration helpers for the OpenIsle MCP server."""
"""Configuration helpers for the MCP service."""
from __future__ import annotations
import os
from functools import lru_cache
from typing import Literal
from typing import Any
from pydantic import Field, SecretStr
from pydantic.networks import AnyHttpUrl
from pydantic_settings import BaseSettings, SettingsConfigDict
from pydantic import BaseModel, ConfigDict, Field, ValidationError
class Settings(BaseSettings):
"""Configuration for the MCP server."""
class Settings(BaseModel):
"""Application settings sourced from environment variables."""
backend_base_url: AnyHttpUrl = Field(
"http://springboot:8080",
description="Base URL for the OpenIsle backend service.",
host: str = Field(default="0.0.0.0", description="Host to bind the HTTP server to")
port: int = Field(default=9090, ge=1, le=65535, description="Port exposed by the MCP server")
backend_base_url: str = Field(
default="http://springboot:8080",
description="Base URL of the Spring Boot backend that provides search endpoints",
)
host: str = Field(
"0.0.0.0",
description="Host interface to bind when running with HTTP transports.",
connect_timeout: float = Field(
default=5.0,
ge=0.0,
description="Connection timeout when communicating with the backend (seconds)",
)
port: int = Field(
8085,
ge=1,
le=65535,
description="TCP port for HTTP transports.",
)
transport: Literal["stdio", "sse", "streamable-http"] = Field(
"streamable-http",
description="MCP transport to use when running the server.",
)
request_timeout: float = Field(
10.0,
gt=0,
description="Timeout (seconds) for backend search requests.",
)
access_token: SecretStr | None = Field(
default=None,
description=(
"Optional JWT bearer token used for authenticated backend calls. "
"When set, tools that support authentication will use this token "
"automatically unless an explicit token override is provided."
),
)
log_level: str = Field(
"INFO",
description=(
"Logging level for the MCP server (e.g. DEBUG, INFO, WARNING)."
),
read_timeout: float = Field(
default=10.0,
ge=0.0,
description="Read timeout when communicating with the backend (seconds)",
)
model_config = SettingsConfigDict(
env_prefix="OPENISLE_MCP_",
env_file=".env",
env_file_encoding="utf-8",
case_sensitive=False,
)
model_config = ConfigDict(extra="ignore")
@property
def normalized_backend_base_url(self) -> str:
"""Return the backend base URL without a trailing slash."""
return self.backend_base_url.rstrip("/")
ENV_MAPPING: dict[str, str] = {
"host": "MCP_HOST",
"port": "MCP_PORT",
"backend_base_url": "MCP_BACKEND_BASE_URL",
"connect_timeout": "MCP_CONNECT_TIMEOUT",
"read_timeout": "MCP_READ_TIMEOUT",
}
def _load_environment_values() -> dict[str, Any]:
values: dict[str, Any] = {}
for field, env_name in ENV_MAPPING.items():
value = os.getenv(env_name)
if value is None:
continue
values[field] = value
return values
@lru_cache(maxsize=1)
def get_settings() -> Settings:
"""Return cached application settings."""
"""Load and validate application settings."""
return Settings()
values = _load_environment_values()
try:
return Settings(**values)
except ValidationError as exc: # pragma: no cover - defensive branch
raise RuntimeError("Invalid MCP configuration") from exc
__all__ = ["Settings", "get_settings"]

View File

@@ -0,0 +1,38 @@
"""Pydantic models shared across the MCP service."""
from __future__ import annotations
from typing import Optional
from pydantic import BaseModel, ConfigDict, Field
class SearchResult(BaseModel):
"""Representation of a single search result entry."""
model_config = ConfigDict(extra="ignore")
type: Optional[str] = Field(default=None, description="Type of the result entry")
id: Optional[int] = Field(default=None, description="Identifier of the result entry")
text: Optional[str] = Field(default=None, description="Primary text of the result entry")
subText: Optional[str] = Field(default=None, description="Secondary text associated with the result")
extra: Optional[str] = Field(default=None, description="Additional information about the result")
postId: Optional[int] = Field(default=None, description="Related post identifier, if applicable")
highlightedText: Optional[str] = Field(default=None, description="Highlighted primary text segment")
highlightedSubText: Optional[str] = Field(
default=None,
description="Highlighted secondary text segment",
)
highlightedExtra: Optional[str] = Field(
default=None,
description="Highlighted additional information",
)
class SearchResponse(BaseModel):
"""Response payload returned by the search endpoint."""
results: list[SearchResult] = Field(default_factory=list)
__all__ = ["SearchResult", "SearchResponse"]

View File

@@ -1,378 +0,0 @@
"""Pydantic models describing tool inputs and outputs."""
from __future__ import annotations
from datetime import datetime
from typing import Any, Optional
from pydantic import BaseModel, Field, ConfigDict, field_validator
class SearchResultItem(BaseModel):
"""A single search result entry."""
type: str = Field(description="Entity type for the result (post, user, tag, etc.).")
id: Optional[int] = Field(default=None, description="Identifier of the matched entity.")
text: Optional[str] = Field(default=None, description="Primary text associated with the result.")
sub_text: Optional[str] = Field(
default=None,
alias="subText",
description="Secondary text, e.g. a username or excerpt.",
)
extra: Optional[str] = Field(default=None, description="Additional contextual information.")
post_id: Optional[int] = Field(
default=None,
alias="postId",
description="Associated post identifier when relevant.",
)
highlighted_text: Optional[str] = Field(
default=None,
alias="highlightedText",
description="Highlighted snippet of the primary text if available.",
)
highlighted_sub_text: Optional[str] = Field(
default=None,
alias="highlightedSubText",
description="Highlighted snippet of the secondary text if available.",
)
highlighted_extra: Optional[str] = Field(
default=None,
alias="highlightedExtra",
description="Highlighted snippet of extra information if available.",
)
model_config = ConfigDict(populate_by_name=True)
class SearchResponse(BaseModel):
"""Structured response returned by the search tool."""
keyword: str = Field(description="The keyword that was searched.")
total: int = Field(description="Total number of matches returned by the backend.")
results: list[SearchResultItem] = Field(
default_factory=list,
description="Ordered collection of search results.",
)
class AuthorInfo(BaseModel):
"""Summary of a post or comment author."""
id: Optional[int] = Field(default=None, description="Author identifier.")
username: Optional[str] = Field(default=None, description="Author username.")
avatar: Optional[str] = Field(default=None, description="URL of the author's avatar.")
display_medal: Optional[str] = Field(
default=None,
alias="displayMedal",
description="Medal displayed next to the author, when available.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")
class CategoryInfo(BaseModel):
"""Basic information about a post category."""
id: Optional[int] = Field(default=None, description="Category identifier.")
name: Optional[str] = Field(default=None, description="Category name.")
description: Optional[str] = Field(
default=None, description="Human friendly description of the category."
)
icon: Optional[str] = Field(default=None, description="Icon URL associated with the category.")
small_icon: Optional[str] = Field(
default=None,
alias="smallIcon",
description="Compact icon URL for the category.",
)
count: Optional[int] = Field(default=None, description="Number of posts within the category.")
model_config = ConfigDict(populate_by_name=True, extra="allow")
class TagInfo(BaseModel):
"""Details for a tag assigned to a post."""
id: Optional[int] = Field(default=None, description="Tag identifier.")
name: Optional[str] = Field(default=None, description="Tag name.")
description: Optional[str] = Field(default=None, description="Description of the tag.")
icon: Optional[str] = Field(default=None, description="Icon URL for the tag.")
small_icon: Optional[str] = Field(
default=None,
alias="smallIcon",
description="Compact icon URL for the tag.",
)
created_at: Optional[datetime] = Field(
default=None,
alias="createdAt",
description="When the tag was created.",
)
count: Optional[int] = Field(default=None, description="Number of posts using the tag.")
model_config = ConfigDict(populate_by_name=True, extra="allow")
class ReactionInfo(BaseModel):
"""Representation of a reaction on a post or comment."""
id: Optional[int] = Field(default=None, description="Reaction identifier.")
type: Optional[str] = Field(default=None, description="Reaction type (emoji, like, etc.).")
user: Optional[str] = Field(default=None, description="Username of the reacting user.")
post_id: Optional[int] = Field(
default=None,
alias="postId",
description="Related post identifier when applicable.",
)
comment_id: Optional[int] = Field(
default=None,
alias="commentId",
description="Related comment identifier when applicable.",
)
message_id: Optional[int] = Field(
default=None,
alias="messageId",
description="Related message identifier when applicable.",
)
reward: Optional[int] = Field(default=None, description="Reward granted for the reaction, if any.")
model_config = ConfigDict(populate_by_name=True, extra="allow")
class CommentData(BaseModel):
"""Comment information returned by the backend."""
id: Optional[int] = Field(default=None, description="Comment identifier.")
content: Optional[str] = Field(default=None, description="Markdown content of the comment.")
created_at: Optional[datetime] = Field(
default=None,
alias="createdAt",
description="Timestamp when the comment was created.",
)
pinned_at: Optional[datetime] = Field(
default=None,
alias="pinnedAt",
description="Timestamp when the comment was pinned, if applicable.",
)
author: Optional[AuthorInfo] = Field(default=None, description="Author of the comment.")
replies: list["CommentData"] = Field(
default_factory=list,
description="Nested replies associated with the comment.",
)
reactions: list[ReactionInfo] = Field(
default_factory=list,
description="Reactions applied to the comment.",
)
reward: Optional[int] = Field(default=None, description="Reward gained by posting the comment.")
point_reward: Optional[int] = Field(
default=None,
alias="pointReward",
description="Points rewarded for the comment.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")
@field_validator("replies", "reactions", mode="before")
@classmethod
def _ensure_comment_lists(cls, value: Any) -> list[Any]:
"""Convert ``None`` payloads to empty lists for comment collections."""
if value is None:
return []
return value
class CommentReplyResult(BaseModel):
"""Structured response returned when replying to a comment."""
comment: CommentData = Field(description="Reply comment returned by the backend.")
class CommentCreateResult(BaseModel):
"""Structured response returned when creating a comment on a post."""
comment: CommentData = Field(description="Comment returned by the backend.")
class PostCreateResult(BaseModel):
"""Structured response returned when creating a new post."""
post: PostDetail = Field(description="Detailed post payload returned by the backend.")
class PostSummary(BaseModel):
"""Summary information for a post."""
id: Optional[int] = Field(default=None, description="Post identifier.")
title: Optional[str] = Field(default=None, description="Title of the post.")
content: Optional[str] = Field(default=None, description="Excerpt or content of the post.")
created_at: Optional[datetime] = Field(
default=None,
alias="createdAt",
description="When the post was created.",
)
author: Optional[AuthorInfo] = Field(default=None, description="Author who created the post.")
category: Optional[CategoryInfo] = Field(default=None, description="Category of the post.")
tags: list[TagInfo] = Field(default_factory=list, description="Tags assigned to the post.")
views: Optional[int] = Field(default=None, description="Total view count for the post.")
comment_count: Optional[int] = Field(
default=None,
alias="commentCount",
description="Number of comments on the post.",
)
status: Optional[str] = Field(default=None, description="Workflow status of the post.")
pinned_at: Optional[datetime] = Field(
default=None,
alias="pinnedAt",
description="When the post was pinned, if ever.",
)
last_reply_at: Optional[datetime] = Field(
default=None,
alias="lastReplyAt",
description="Timestamp of the most recent reply.",
)
reactions: list[ReactionInfo] = Field(
default_factory=list,
description="Reactions received by the post.",
)
participants: list[AuthorInfo] = Field(
default_factory=list,
description="Users participating in the discussion.",
)
subscribed: Optional[bool] = Field(
default=None,
description="Whether the current user is subscribed to the post.",
)
reward: Optional[int] = Field(default=None, description="Reward granted for the post.")
point_reward: Optional[int] = Field(
default=None,
alias="pointReward",
description="Points granted for the post.",
)
type: Optional[str] = Field(default=None, description="Type of the post.")
lottery: Optional[dict[str, Any]] = Field(
default=None, description="Lottery information for the post."
)
poll: Optional[dict[str, Any]] = Field(
default=None, description="Poll information for the post."
)
rss_excluded: Optional[bool] = Field(
default=None,
alias="rssExcluded",
description="Whether the post is excluded from RSS feeds.",
)
closed: Optional[bool] = Field(default=None, description="Whether the post is closed for replies.")
visible_scope: Optional[str] = Field(
default=None,
alias="visibleScope",
description="Visibility scope configuration for the post.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")
@field_validator("tags", "reactions", "participants", mode="before")
@classmethod
def _ensure_post_lists(cls, value: Any) -> list[Any]:
"""Normalize ``None`` values returned by the backend to empty lists."""
if value is None:
return []
return value
class RecentPostsResponse(BaseModel):
"""Structured response for the recent posts tool."""
minutes: int = Field(description="Time window, in minutes, used for the query.")
total: int = Field(description="Number of posts returned by the backend.")
posts: list[PostSummary] = Field(
default_factory=list,
description="Posts created within the requested time window.",
)
CommentData.model_rebuild()
class PostDetail(PostSummary):
"""Detailed information for a single post, including comments."""
comments: list[CommentData] = Field(
default_factory=list,
description="Comments that belong to the post.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")
@field_validator("comments", mode="before")
@classmethod
def _ensure_comments_list(cls, value: Any) -> list[Any]:
"""Treat ``None`` comments payloads as empty lists."""
if value is None:
return []
return value
class NotificationData(BaseModel):
"""Unread notification payload returned by the backend."""
id: Optional[int] = Field(default=None, description="Notification identifier.")
type: Optional[str] = Field(default=None, description="Type of the notification.")
post: Optional[PostSummary] = Field(
default=None, description="Post associated with the notification if applicable."
)
comment: Optional[CommentData] = Field(
default=None, description="Comment referenced by the notification when available."
)
parent_comment: Optional[CommentData] = Field(
default=None,
alias="parentComment",
description="Parent comment for nested replies, when present.",
)
from_user: Optional[AuthorInfo] = Field(
default=None,
alias="fromUser",
description="User who triggered the notification.",
)
reaction_type: Optional[str] = Field(
default=None,
alias="reactionType",
description="Reaction type for reaction-based notifications.",
)
content: Optional[str] = Field(
default=None, description="Additional content or message for the notification."
)
approved: Optional[bool] = Field(
default=None, description="Approval status for moderation notifications."
)
read: Optional[bool] = Field(default=None, description="Whether the notification is read.")
created_at: Optional[datetime] = Field(
default=None,
alias="createdAt",
description="Timestamp when the notification was created.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")
class UnreadNotificationsResponse(BaseModel):
"""Structured response for unread notification queries."""
page: int = Field(description="Requested page index for the unread notifications.")
size: int = Field(description="Requested page size for the unread notifications.")
total: int = Field(description="Number of unread notifications returned in this page.")
notifications: list[NotificationData] = Field(
default_factory=list,
description="Unread notifications returned by the backend.",
)
class NotificationCleanupResult(BaseModel):
"""Structured response returned after marking notifications as read."""
processed_ids: list[int] = Field(
default_factory=list,
description="Identifiers that were marked as read in the backend.",
)
total_marked: int = Field(
description="Total number of notifications successfully marked as read.",
)

View File

@@ -1,345 +0,0 @@
"""HTTP client helpers for talking to the OpenIsle backend endpoints."""
from __future__ import annotations
import json
import logging
from typing import Any
import httpx
logger = logging.getLogger(__name__)
class SearchClient:
"""Client for calling the OpenIsle HTTP APIs used by the MCP server."""
def __init__(
self,
base_url: str,
*,
timeout: float = 10.0,
access_token: str | None = None,
) -> None:
self._base_url = base_url.rstrip("/")
self._timeout = timeout
self._client: httpx.AsyncClient | None = None
self._access_token = self._sanitize_token(access_token)
def _get_client(self) -> httpx.AsyncClient:
if self._client is None:
logger.debug(
"Creating httpx.AsyncClient for base URL %s with timeout %.2fs",
self._base_url,
self._timeout,
)
self._client = httpx.AsyncClient(
base_url=self._base_url,
timeout=self._timeout,
)
return self._client
@staticmethod
def _sanitize_token(token: str | None) -> str | None:
if token is None:
return None
stripped = token.strip()
return stripped or None
def update_access_token(self, token: str | None) -> None:
"""Update the default access token used for authenticated requests."""
self._access_token = self._sanitize_token(token)
if self._access_token:
logger.debug("Configured default access token for SearchClient requests.")
else:
logger.debug("Cleared default access token for SearchClient requests.")
def _resolve_token(self, token: str | None) -> str | None:
candidate = self._sanitize_token(token)
if candidate is not None:
return candidate
return self._access_token
def _require_token(self, token: str | None) -> str:
resolved = self._resolve_token(token)
if resolved is None:
raise ValueError(
"Authenticated request requires an access token. Provide a Bearer token "
"via the MCP Authorization header or configure a default token for the server."
)
return resolved
def _build_headers(
self,
*,
token: str | None = None,
accept: str = "application/json",
include_json: bool = False,
) -> dict[str, str]:
headers: dict[str, str] = {"Accept": accept}
resolved = self._resolve_token(token)
if resolved:
headers["Authorization"] = f"Bearer {resolved}"
if include_json:
headers["Content-Type"] = "application/json"
return headers
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
"""Call the global search endpoint and return the parsed JSON payload."""
client = self._get_client()
logger.debug("Calling global search with keyword=%s", keyword)
response = await client.get(
"/api/search/global",
params={"keyword": keyword},
headers=self._build_headers(),
)
response.raise_for_status()
payload = response.json()
if not isinstance(payload, list):
formatted = json.dumps(payload, ensure_ascii=False)[:200]
raise ValueError(f"Unexpected response format from search endpoint: {formatted}")
logger.info(
"Global search returned %d results for keyword '%s'",
len(payload),
keyword,
)
return [self._ensure_dict(entry) for entry in payload]
async def reply_to_comment(
self,
comment_id: int,
content: str,
*,
token: str | None = None,
captcha: str | None = None,
) -> dict[str, Any]:
"""Reply to an existing comment and return the created reply."""
client = self._get_client()
resolved_token = self._require_token(token)
headers = self._build_headers(token=resolved_token, include_json=True)
payload: dict[str, Any] = {"content": content}
if captcha is not None:
stripped_captcha = captcha.strip()
if stripped_captcha:
payload["captcha"] = stripped_captcha
logger.debug(
"Posting reply to comment_id=%s (captcha=%s)",
comment_id,
bool(captcha),
)
response = await client.post(
f"/api/comments/{comment_id}/replies",
json=payload,
headers=headers,
)
response.raise_for_status()
body = self._ensure_dict(response.json())
logger.info("Reply to comment_id=%s succeeded with id=%s", comment_id, body.get("id"))
return body
async def reply_to_post(
self,
post_id: int,
content: str,
*,
token: str | None = None,
captcha: str | None = None,
) -> dict[str, Any]:
"""Create a comment on a post and return the backend payload."""
client = self._get_client()
resolved_token = self._require_token(token)
headers = self._build_headers(token=resolved_token, include_json=True)
payload: dict[str, Any] = {"content": content}
if captcha is not None:
stripped_captcha = captcha.strip()
if stripped_captcha:
payload["captcha"] = stripped_captcha
logger.debug(
"Posting comment to post_id=%s (captcha=%s)",
post_id,
bool(captcha),
)
response = await client.post(
f"/api/posts/{post_id}/comments",
json=payload,
headers=headers,
)
response.raise_for_status()
body = self._ensure_dict(response.json())
logger.info("Reply to post_id=%s succeeded with id=%s", post_id, body.get("id"))
return body
async def create_post(
self,
payload: dict[str, Any],
*,
token: str | None = None,
) -> dict[str, Any]:
"""Create a new post and return the detailed backend payload."""
client = self._get_client()
resolved_token = self._require_token(token)
headers = self._build_headers(token=resolved_token, include_json=True)
logger.debug(
"Creating post with category_id=%s and %d tag(s)",
payload.get("categoryId"),
len(payload.get("tagIds", []) if isinstance(payload.get("tagIds"), list) else []),
)
response = await client.post(
"/api/posts",
json=payload,
headers=headers,
)
response.raise_for_status()
body = self._ensure_dict(response.json())
logger.info("Post creation succeeded with id=%s", body.get("id"))
return body
async def recent_posts(self, minutes: int) -> list[dict[str, Any]]:
"""Return posts created within the given timeframe."""
client = self._get_client()
logger.debug(
"Fetching recent posts within last %s minutes",
minutes,
)
response = await client.get(
"/api/posts/recent",
params={"minutes": minutes},
headers=self._build_headers(),
)
response.raise_for_status()
payload = response.json()
if not isinstance(payload, list):
formatted = json.dumps(payload, ensure_ascii=False)[:200]
raise ValueError(
f"Unexpected response format from recent posts endpoint: {formatted}"
)
logger.info(
"Fetched %d recent posts for window=%s minutes",
len(payload),
minutes,
)
return [self._ensure_dict(entry) for entry in payload]
async def get_post(self, post_id: int, token: str | None = None) -> dict[str, Any]:
"""Retrieve the detailed payload for a single post."""
client = self._get_client()
headers = self._build_headers(token=token)
logger.debug("Fetching post details for post_id=%s", post_id)
response = await client.get(f"/api/posts/{post_id}", headers=headers)
response.raise_for_status()
body = self._ensure_dict(response.json())
logger.info(
"Retrieved post_id=%s successfully with %d top-level comments",
post_id,
len(body.get("comments", []) if isinstance(body.get("comments"), list) else []),
)
return body
async def list_unread_notifications(
self,
*,
page: int = 0,
size: int = 30,
token: str | None = None,
) -> list[dict[str, Any]]:
"""Return unread notifications for the authenticated user."""
client = self._get_client()
resolved_token = self._require_token(token)
logger.debug(
"Fetching unread notifications with page=%s, size=%s",
page,
size,
)
response = await client.get(
"/api/notifications/unread",
params={"page": page, "size": size},
headers=self._build_headers(token=resolved_token),
)
response.raise_for_status()
payload = response.json()
if not isinstance(payload, list):
formatted = json.dumps(payload, ensure_ascii=False)[:200]
raise ValueError(
"Unexpected response format from unread notifications endpoint: "
f"{formatted}"
)
logger.info(
"Fetched %d unread notifications (page=%s, size=%s)",
len(payload),
page,
size,
)
return [self._ensure_dict(entry) for entry in payload]
async def mark_notifications_read(
self,
ids: list[int],
*,
token: str | None = None,
) -> None:
"""Mark the provided notifications as read for the authenticated user."""
if not ids:
raise ValueError(
"At least one notification identifier must be provided to mark as read."
)
sanitized_ids: list[int] = []
for value in ids:
if isinstance(value, bool):
raise ValueError("Notification identifiers must be integers, not booleans.")
try:
converted = int(value)
except (TypeError, ValueError) as exc: # pragma: no cover - defensive
raise ValueError(
"Notification identifiers must be integers."
) from exc
if converted <= 0:
raise ValueError(
"Notification identifiers must be positive integers."
)
sanitized_ids.append(converted)
client = self._get_client()
resolved_token = self._require_token(token)
logger.debug(
"Marking %d notifications as read: ids=%s",
len(sanitized_ids),
sanitized_ids,
)
response = await client.post(
"/api/notifications/read",
json={"ids": sanitized_ids},
headers=self._build_headers(token=resolved_token, include_json=True),
)
response.raise_for_status()
logger.info(
"Successfully marked %d notifications as read.",
len(sanitized_ids),
)
async def aclose(self) -> None:
"""Dispose of the underlying HTTP client."""
if self._client is not None:
await self._client.aclose()
self._client = None
logger.debug("Closed httpx.AsyncClient for SearchClient.")
@staticmethod
def _ensure_dict(entry: Any) -> dict[str, Any]:
if not isinstance(entry, dict):
raise ValueError(f"Expected JSON object, got: {type(entry)!r}")
return entry

View File

File diff suppressed because it is too large

View File

@@ -100,28 +100,10 @@ server {
# auth_basic_user_file /etc/nginx/.htpasswd;
}
# ---------- WEBSOCKET GATEWAY TO :8082 ----------
location ^~ /websocket/ {
proxy_pass http://127.0.0.1:8084/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
add_header Cache-Control "no-store" always;
}
location /mcp {
proxy_pass http://127.0.0.1:8085;
proxy_pass http://127.0.0.1:8084/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;

View File

@@ -8,8 +8,11 @@ server {
listen 443 ssl;
server_name staging.open-isle.com www.staging.open-isle.com;
ssl_certificate /etc/letsencrypt/live/staging.open-isle.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/staging.open-isle.com/privkey.pem;
# ssl_certificate /etc/letsencrypt/live/open-isle.com/fullchain.pem;
# ssl_certificate_key /etc/letsencrypt/live/open-isle.com/privkey.pem;
include /etc/letsencrypt/options-ssl-nginx.conf;
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
@@ -37,13 +40,59 @@ server {
add_header X-Upstream $upstream_addr always;
}
# 1) 原生 WebSocket
location ^~ /api/ws {
proxy_pass http://127.0.0.1:8081; # 不要尾随 /,保留原样 URI
proxy_http_version 1.1;
# 升级所需
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
# 统一透传这些头(你在 /api/ 有,/api/ws 也要有)
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
}
# 2) SockJS(包含 /info、/iframe.html、/.../websocket 等)
location ^~ /api/sockjs {
proxy_pass http://127.0.0.1:8081;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
# 如要同源 iframe 回退,下面两行二选一(或者交给 Spring Security 的 sameOrigin)
# proxy_hide_header X-Frame-Options;
# add_header X-Frame-Options "SAMEORIGIN" always;
}
# ---------- API ----------
location /api/ {
proxy_pass http://127.0.0.1:8081/api/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
@@ -60,6 +109,7 @@ server {
proxy_cache_bypass 1;
}
# ---------- WEBSOCKET GATEWAY TO :8083 ----------
location ^~ /websocket/ {
proxy_pass http://127.0.0.1:8083/;
proxy_http_version 1.1;
@@ -80,24 +130,4 @@ server {
add_header Cache-Control "no-store" always;
}
location /mcp {
proxy_pass http://127.0.0.1:8086;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
add_header Cache-Control "no-store" always;
}
}
}