mirror of
https://github.com/nagisa77/OpenIsle.git
synced 2026-02-06 23:21:16 +08:00
Compare commits
38 Commits
codex/add-
...
codex/upda
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
02645af321 | ||
|
|
c3a175f13f | ||
|
|
0821d447f7 | ||
|
|
257794ca00 | ||
|
|
6a527de3eb | ||
|
|
2313f90eb3 | ||
|
|
7fde984e7d | ||
|
|
fc41e605e4 | ||
|
|
042e5fdbe6 | ||
|
|
629442bff6 | ||
|
|
7798910be0 | ||
|
|
6f036eb4fe | ||
|
|
56fc05cb3c | ||
|
|
a55a15659b | ||
|
|
ccf6e0c7ce | ||
|
|
87677f5968 | ||
|
|
fd93a2dc61 | ||
|
|
80f862a226 | ||
|
|
26bb85f4d4 | ||
|
|
398b4b482f | ||
|
|
2cfb302981 | ||
|
|
e75bd76b71 | ||
|
|
99c3ac1837 | ||
|
|
749ab560ff | ||
|
|
541ad4d149 | ||
|
|
03eb027ea4 | ||
|
|
4194b2be91 | ||
|
|
9dadaad5ba | ||
|
|
d4b3400c5f | ||
|
|
e585100625 | ||
|
|
e94471b53e | ||
|
|
997dacdbe6 | ||
|
|
c01349a436 | ||
|
|
4cf48f9157 | ||
|
|
796afbe612 | ||
|
|
dca14390ca | ||
|
|
39875acd35 | ||
|
|
62edc75735 |
29
.github/workflows/coffee-bot.yml
vendored
Normal file
29
.github/workflows/coffee-bot.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
name: Coffee Bot

on:
  schedule:
    # Daily at 01:00 UTC (09:00 Beijing time). NOTE(review): the post announces
    # a 15:00 Beijing draw, so this gives ~6h of lead time — confirm intended.
    - cron: "0 1 * * *"
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

jobs:
  run-coffee-bot:
    # Environment holding OPENAI_API_KEY / OPENISLE_TOKEN secrets.
    environment: Bots
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "20"
          # NOTE(review): npm cache requires a lockfile in the repo — verify
          # one exists, otherwise setup-node fails on cache resolution.
          cache: "npm"

      - name: Install dependencies
        run: npm install --no-save @openai/agents tsx typescript

      - name: Run coffee bot
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          OPENISLE_TOKEN: ${{ secrets.OPENISLE_TOKEN }}
        run: npx tsx bots/instance/coffee_bot.ts
|
||||
29
.github/workflows/reply-bots.yml
vendored
Normal file
29
.github/workflows/reply-bots.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
name: Reply Bots

on:
  schedule:
    # Every 30 minutes: sweep unread mentions/comments and reply.
    - cron: "*/30 * * * *"
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

jobs:
  run-reply-bot:
    # Environment holding OPENAI_API_KEY / OPENISLE_TOKEN secrets.
    environment: Bots
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "20"
          # NOTE(review): npm cache requires a lockfile in the repo — verify
          # one exists, otherwise setup-node fails on cache resolution.
          cache: "npm"

      - name: Install dependencies
        run: npm install --no-save @openai/agents tsx typescript

      - name: Run reply bot
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          OPENISLE_TOKEN: ${{ secrets.OPENISLE_TOKEN }}
        run: npx tsx bots/instance/reply_bot.ts
|
||||
139
bots/bot_father.ts
Normal file
139
bots/bot_father.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import { Agent, Runner, hostedMcpTool, withTrace } from "@openai/agents";
|
||||
|
||||
export type WorkflowInput = { input_as_text: string };
|
||||
|
||||
export abstract class BotFather {
|
||||
protected readonly allowedMcpTools = [
|
||||
"search",
|
||||
"create_post",
|
||||
"reply_to_post",
|
||||
"reply_to_comment",
|
||||
"recent_posts",
|
||||
"get_post",
|
||||
"list_unread_messages",
|
||||
"mark_notifications_read",
|
||||
"create_post",
|
||||
];
|
||||
|
||||
protected readonly mcp = hostedMcpTool({
|
||||
serverLabel: "openisle_mcp",
|
||||
serverUrl: "https://www.open-isle.com/mcp",
|
||||
allowedTools: this.allowedMcpTools,
|
||||
requireApproval: "never",
|
||||
});
|
||||
|
||||
protected readonly openisleToken = process.env.OPENISLE_TOKEN ?? "";
|
||||
protected readonly agent: Agent;
|
||||
|
||||
constructor(protected readonly name: string) {
|
||||
console.log(`✅ ${this.name} starting...`);
|
||||
console.log(
|
||||
"🛠️ Configured Hosted MCP tools:",
|
||||
this.allowedMcpTools.join(", ")
|
||||
);
|
||||
|
||||
console.log(
|
||||
this.openisleToken
|
||||
? "🔑 OPENISLE_TOKEN detected in environment."
|
||||
: "🔓 OPENISLE_TOKEN not set; agent will request it if required."
|
||||
);
|
||||
|
||||
this.agent = new Agent({
|
||||
name: this.name,
|
||||
instructions: this.buildInstructions(),
|
||||
tools: [this.mcp],
|
||||
model: "gpt-4o",
|
||||
modelSettings: {
|
||||
temperature: 0.7,
|
||||
topP: 1,
|
||||
maxTokens: 2048,
|
||||
toolChoice: "auto",
|
||||
store: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
protected buildInstructions(): string {
|
||||
const instructions = [
|
||||
...this.getBaseInstructions(),
|
||||
...this.getAdditionalInstructions(),
|
||||
].filter(Boolean);
|
||||
return instructions.join("\n");
|
||||
}
|
||||
|
||||
protected getBaseInstructions(): string[] {
|
||||
return [
|
||||
"You are a helpful assistant for https://www.open-isle.com.",
|
||||
"Finish tasks end-to-end before replying. If multiple MCP tools are needed, call them sequentially until the task is truly done.",
|
||||
"When presenting the result, reply in Chinese with a concise summary and include any important URLs or IDs.",
|
||||
this.openisleToken
|
||||
? `If tools require auth, use this token exactly where the tool schema expects it: ${this.openisleToken}`
|
||||
: "If a tool requires auth, ask me to provide OPENISLE_TOKEN via env.",
|
||||
"After finishing replies, call mark_notifications_read with all processed notification IDs to keep the inbox clean.",
|
||||
];
|
||||
}
|
||||
|
||||
protected getAdditionalInstructions(): string[] {
|
||||
return [];
|
||||
}
|
||||
|
||||
protected createRunner(): Runner {
|
||||
return new Runner({
|
||||
workflowName: this.name,
|
||||
traceMetadata: {
|
||||
__trace_source__: "agent-builder",
|
||||
workflow_id: "wf_69003cbd47e08190928745d3c806c0b50d1a01cfae052be8",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
public async runWorkflow(workflow: WorkflowInput) {
|
||||
if (!process.env.OPENAI_API_KEY) {
|
||||
throw new Error("Missing OPENAI_API_KEY");
|
||||
}
|
||||
|
||||
const runner = this.createRunner();
|
||||
|
||||
return await withTrace(`${this.name} run`, async () => {
|
||||
const preview = workflow.input_as_text.trim();
|
||||
console.log(
|
||||
"📝 Received workflow input (preview):",
|
||||
preview.length > 200 ? `${preview.slice(0, 200)}…` : preview
|
||||
);
|
||||
|
||||
console.log("🚦 Starting agent run with maxTurns=16...");
|
||||
const result = await runner.run(this.agent, workflow.input_as_text, {
|
||||
maxTurns: 16,
|
||||
});
|
||||
|
||||
console.log("📬 Agent run completed. Result keys:", Object.keys(result));
|
||||
|
||||
if (!result.finalOutput) {
|
||||
throw new Error("Agent result is undefined (no final output).");
|
||||
}
|
||||
|
||||
const openisleBotResult = { output_text: String(result.finalOutput) };
|
||||
|
||||
console.log(
|
||||
"🤖 Agent result (length=%d):\n%s",
|
||||
openisleBotResult.output_text.length,
|
||||
openisleBotResult.output_text
|
||||
);
|
||||
return openisleBotResult;
|
||||
});
|
||||
}
|
||||
|
||||
protected abstract getCliQuery(): string;
|
||||
|
||||
public async runCli(): Promise<void> {
|
||||
try {
|
||||
const query = this.getCliQuery();
|
||||
console.log("🔍 Running workflow...");
|
||||
await this.runWorkflow({ input_as_text: query });
|
||||
process.exit(0);
|
||||
} catch (err: any) {
|
||||
console.error("❌ Agent failed:", err?.stack || err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
63
bots/instance/coffee_bot.ts
Normal file
63
bots/instance/coffee_bot.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { BotFather, WorkflowInput } from "../bot_father";
|
||||
|
||||
const WEEKDAY_NAMES = ["日", "一", "二", "三", "四", "五", "六"] as const;
|
||||
|
||||
class CoffeeBot extends BotFather {
|
||||
constructor() {
|
||||
super("Coffee Bot");
|
||||
}
|
||||
|
||||
protected override getAdditionalInstructions(): string[] {
|
||||
return [
|
||||
"You are responsible for 发布每日抽奖早安贴。",
|
||||
"创建帖子时,确保标题、奖品信息、开奖时间以及领奖方式完全符合 CLI 查询提供的细节。",
|
||||
"正文需亲切友好,简洁明了,鼓励社区成员互动。",
|
||||
"开奖说明需明确告知中奖者需私聊站长 @nagisa 领取奖励。",
|
||||
"确保只发布一个帖子,避免重复调用 create_post。",
|
||||
];
|
||||
}
|
||||
|
||||
protected override getCliQuery(): string {
|
||||
const now = new Date();
|
||||
const beijingNow = new Date(
|
||||
now.toLocaleString("en-US", { timeZone: "Asia/Shanghai" })
|
||||
);
|
||||
const weekday = WEEKDAY_NAMES[beijingNow.getDay()];
|
||||
|
||||
const drawTime = new Date(beijingNow);
|
||||
drawTime.setHours(15, 0, 0, 0);
|
||||
const drawTimeText = drawTime
|
||||
.toLocaleTimeString("zh-CN", {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
hour12: false,
|
||||
timeZone: "Asia/Shanghai",
|
||||
})
|
||||
.replace(/^24:/, "00:");
|
||||
|
||||
return `
|
||||
请立即在 https://www.open-isle.com 使用 create_post 发表一篇全新帖子,遵循以下要求:
|
||||
1. 标题固定为「大家星期${weekday}早安--抽一杯咖啡」。
|
||||
2. 正文包含:
|
||||
- 亲切的早安问候;
|
||||
- 明确奖品写作“Coffee x 1”;
|
||||
- 奖品图片链接:https://openisle-1307107697.cos.accelerate.myqcloud.com/dynamic_assert/0d6a9b33e9ca4fe5a90540187d3f9ecb.png;
|
||||
- 公布开奖时间为今天下午 15:00(北京时间,写成 ${drawTimeText});
|
||||
- 标注“领奖请私聊站长 @nagisa”;
|
||||
- 鼓励大家留言互动。
|
||||
3. 帖子语言使用简体中文,格式可用 Markdown,使关键信息醒目。
|
||||
4. 完成后只输出“已发布咖啡抽奖贴”,不额外生成总结。
|
||||
`.trim();
|
||||
}
|
||||
}
|
||||
|
||||
const coffeeBot = new CoffeeBot();
|
||||
|
||||
export const runWorkflow = async (workflow: WorkflowInput) => {
|
||||
return coffeeBot.runWorkflow(workflow);
|
||||
};
|
||||
|
||||
if (require.main === module) {
|
||||
coffeeBot.runCli();
|
||||
}
|
||||
|
||||
39
bots/instance/reply_bot.ts
Normal file
39
bots/instance/reply_bot.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
// reply_bot.ts
|
||||
import { BotFather, WorkflowInput } from "../bot_father";
|
||||
|
||||
class ReplyBot extends BotFather {
|
||||
constructor() {
|
||||
super("OpenIsle Bot");
|
||||
}
|
||||
|
||||
protected override getAdditionalInstructions(): string[] {
|
||||
return [
|
||||
"You are a helpful and cute assistant for https://www.open-isle.com. Keep the lovable tone with plentiful kawaii kaomoji (颜表情) such as (๑˃ᴗ˂)ﻭ, (•̀ω•́)✧, (。•ᴗ-)_♡, (⁎⁍̴̛ᴗ⁍̴̛⁎), etc., while staying professional and informative.",
|
||||
"OpenIsle 是一个由 Spring Boot + Vue 3 打造的开源社区平台,提供注册登录、OAuth 登录(Google/GitHub/Discord/Twitter)、帖子与评论互动、标签分类、草稿、统计分析、通知消息、全局搜索、Markdown 支持、图片上传(默认腾讯云 COS)、浏览器推送、DiceBear 头像等功能,旨在帮助团队快速搭建属于自己的技术社区。",
|
||||
"回复时请主动结合上述站点背景,为用户提供有洞察力、可执行的建议或答案,并在需要时引用官网 https://www.open-isle.com、GitHub 仓库 https://github.com/nagisa77/OpenIsle 或相关文档链接,避免空泛的安慰或套话。",
|
||||
"When presenting the result, reply in Chinese with a concise yet content-rich summary filled with kaomoji,并清晰列出关键结论、操作步骤、重要 URL 或 ID,确保用户能直接采取行动。",
|
||||
];
|
||||
}
|
||||
|
||||
protected override getCliQuery(): string {
|
||||
return `
|
||||
【AUTO】无需确认,自动处理所有未读的提及与评论:
|
||||
1)调用 list_unread_messages;
|
||||
2)依次处理每条“提及/评论”:如需上下文则使用 get_post 获取,生成简明中文回复;如有 commentId 则用 reply_to_comment,否则用 reply_to_post;
|
||||
3)跳过关注和系统事件;
|
||||
4)保证幂等性:如该贴最后一条是你自己发的回复,则跳过;
|
||||
5)调用 mark_notifications_read,传入本次已处理的通知 ID 清理已读;
|
||||
6)最多只处理最新10条;结束时仅输出简要摘要(包含URL或ID)。
|
||||
`.trim();
|
||||
}
|
||||
}
|
||||
|
||||
const replyBot = new ReplyBot();
|
||||
|
||||
export const runWorkflow = async (workflow: WorkflowInput) => {
|
||||
return replyBot.runWorkflow(workflow);
|
||||
};
|
||||
|
||||
if (require.main === module) {
|
||||
replyBot.runCli();
|
||||
}
|
||||
@@ -31,9 +31,12 @@ By default the server listens on port `8085` and serves MCP over Streamable HTTP
|
||||
| Tool | Description |
|
||||
| --- | --- |
|
||||
| `search` | Perform a global search against the OpenIsle backend. |
|
||||
| `create_post` | Publish a new post using a JWT token. |
|
||||
| `reply_to_post` | Create a new comment on a post using a JWT token. |
|
||||
| `reply_to_comment` | Reply to an existing comment using a JWT token. |
|
||||
| `recent_posts` | Retrieve posts created within the last *N* minutes. |
|
||||
|
||||
The tools return structured data mirroring the backend DTOs, including highlighted snippets for
|
||||
search results, the full comment payload for replies, and detailed metadata for recent posts.
|
||||
search results, the full comment payload for post replies and comment replies, and detailed
|
||||
metadata for recent posts.
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from functools import lru_cache
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import Field
|
||||
from pydantic import Field, SecretStr
|
||||
from pydantic.networks import AnyHttpUrl
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
@@ -36,6 +36,20 @@ class Settings(BaseSettings):
|
||||
gt=0,
|
||||
description="Timeout (seconds) for backend search requests.",
|
||||
)
|
||||
access_token: SecretStr | None = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional JWT bearer token used for authenticated backend calls. "
|
||||
"When set, tools that support authentication will use this token "
|
||||
"automatically unless an explicit token override is provided."
|
||||
),
|
||||
)
|
||||
log_level: str = Field(
|
||||
"INFO",
|
||||
description=(
|
||||
"Logging level for the MCP server (e.g. DEBUG, INFO, WARNING)."
|
||||
),
|
||||
)
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
env_prefix="OPENISLE_MCP_",
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
from pydantic import BaseModel, Field, ConfigDict, field_validator
|
||||
|
||||
|
||||
class SearchResultItem(BaseModel):
|
||||
@@ -170,6 +170,15 @@ class CommentData(BaseModel):
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
@field_validator("replies", "reactions", mode="before")
|
||||
@classmethod
|
||||
def _ensure_comment_lists(cls, value: Any) -> list[Any]:
|
||||
"""Convert ``None`` payloads to empty lists for comment collections."""
|
||||
|
||||
if value is None:
|
||||
return []
|
||||
return value
|
||||
|
||||
|
||||
class CommentReplyResult(BaseModel):
|
||||
"""Structured response returned when replying to a comment."""
|
||||
@@ -177,6 +186,18 @@ class CommentReplyResult(BaseModel):
|
||||
comment: CommentData = Field(description="Reply comment returned by the backend.")
|
||||
|
||||
|
||||
class CommentCreateResult(BaseModel):
|
||||
"""Structured response returned when creating a comment on a post."""
|
||||
|
||||
comment: CommentData = Field(description="Comment returned by the backend.")
|
||||
|
||||
|
||||
class PostCreateResult(BaseModel):
|
||||
"""Structured response returned when creating a new post."""
|
||||
|
||||
post: PostDetail = Field(description="Detailed post payload returned by the backend.")
|
||||
|
||||
|
||||
class PostSummary(BaseModel):
|
||||
"""Summary information for a post."""
|
||||
|
||||
@@ -247,6 +268,15 @@ class PostSummary(BaseModel):
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
@field_validator("tags", "reactions", "participants", mode="before")
|
||||
@classmethod
|
||||
def _ensure_post_lists(cls, value: Any) -> list[Any]:
|
||||
"""Normalize ``None`` values returned by the backend to empty lists."""
|
||||
|
||||
if value is None:
|
||||
return []
|
||||
return value
|
||||
|
||||
|
||||
class RecentPostsResponse(BaseModel):
|
||||
"""Structured response for the recent posts tool."""
|
||||
@@ -260,3 +290,89 @@ class RecentPostsResponse(BaseModel):
|
||||
|
||||
|
||||
CommentData.model_rebuild()
|
||||
|
||||
|
||||
class PostDetail(PostSummary):
|
||||
"""Detailed information for a single post, including comments."""
|
||||
|
||||
comments: list[CommentData] = Field(
|
||||
default_factory=list,
|
||||
description="Comments that belong to the post.",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
@field_validator("comments", mode="before")
|
||||
@classmethod
|
||||
def _ensure_comments_list(cls, value: Any) -> list[Any]:
|
||||
"""Treat ``None`` comments payloads as empty lists."""
|
||||
|
||||
if value is None:
|
||||
return []
|
||||
return value
|
||||
|
||||
|
||||
class NotificationData(BaseModel):
|
||||
"""Unread notification payload returned by the backend."""
|
||||
|
||||
id: Optional[int] = Field(default=None, description="Notification identifier.")
|
||||
type: Optional[str] = Field(default=None, description="Type of the notification.")
|
||||
post: Optional[PostSummary] = Field(
|
||||
default=None, description="Post associated with the notification if applicable."
|
||||
)
|
||||
comment: Optional[CommentData] = Field(
|
||||
default=None, description="Comment referenced by the notification when available."
|
||||
)
|
||||
parent_comment: Optional[CommentData] = Field(
|
||||
default=None,
|
||||
alias="parentComment",
|
||||
description="Parent comment for nested replies, when present.",
|
||||
)
|
||||
from_user: Optional[AuthorInfo] = Field(
|
||||
default=None,
|
||||
alias="fromUser",
|
||||
description="User who triggered the notification.",
|
||||
)
|
||||
reaction_type: Optional[str] = Field(
|
||||
default=None,
|
||||
alias="reactionType",
|
||||
description="Reaction type for reaction-based notifications.",
|
||||
)
|
||||
content: Optional[str] = Field(
|
||||
default=None, description="Additional content or message for the notification."
|
||||
)
|
||||
approved: Optional[bool] = Field(
|
||||
default=None, description="Approval status for moderation notifications."
|
||||
)
|
||||
read: Optional[bool] = Field(default=None, description="Whether the notification is read.")
|
||||
created_at: Optional[datetime] = Field(
|
||||
default=None,
|
||||
alias="createdAt",
|
||||
description="Timestamp when the notification was created.",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
||||
|
||||
|
||||
class UnreadNotificationsResponse(BaseModel):
|
||||
"""Structured response for unread notification queries."""
|
||||
|
||||
page: int = Field(description="Requested page index for the unread notifications.")
|
||||
size: int = Field(description="Requested page size for the unread notifications.")
|
||||
total: int = Field(description="Number of unread notifications returned in this page.")
|
||||
notifications: list[NotificationData] = Field(
|
||||
default_factory=list,
|
||||
description="Unread notifications returned by the backend.",
|
||||
)
|
||||
|
||||
|
||||
class NotificationCleanupResult(BaseModel):
|
||||
"""Structured response returned after marking notifications as read."""
|
||||
|
||||
processed_ids: list[int] = Field(
|
||||
default_factory=list,
|
||||
description="Identifiers that were marked as read in the backend.",
|
||||
)
|
||||
total_marked: int = Field(
|
||||
description="Total number of notifications successfully marked as read.",
|
||||
)
|
||||
|
||||
@@ -3,38 +3,108 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchClient:
|
||||
"""Client for calling the OpenIsle HTTP APIs used by the MCP server."""
|
||||
|
||||
def __init__(self, base_url: str, *, timeout: float = 10.0) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
base_url: str,
|
||||
*,
|
||||
timeout: float = 10.0,
|
||||
access_token: str | None = None,
|
||||
) -> None:
|
||||
self._base_url = base_url.rstrip("/")
|
||||
self._timeout = timeout
|
||||
self._client: httpx.AsyncClient | None = None
|
||||
self._access_token = self._sanitize_token(access_token)
|
||||
|
||||
def _get_client(self) -> httpx.AsyncClient:
|
||||
if self._client is None:
|
||||
self._client = httpx.AsyncClient(base_url=self._base_url, timeout=self._timeout)
|
||||
logger.debug(
|
||||
"Creating httpx.AsyncClient for base URL %s with timeout %.2fs",
|
||||
self._base_url,
|
||||
self._timeout,
|
||||
)
|
||||
self._client = httpx.AsyncClient(
|
||||
base_url=self._base_url,
|
||||
timeout=self._timeout,
|
||||
)
|
||||
return self._client
|
||||
|
||||
@staticmethod
|
||||
def _sanitize_token(token: str | None) -> str | None:
|
||||
if token is None:
|
||||
return None
|
||||
stripped = token.strip()
|
||||
return stripped or None
|
||||
|
||||
def update_access_token(self, token: str | None) -> None:
|
||||
"""Update the default access token used for authenticated requests."""
|
||||
|
||||
self._access_token = self._sanitize_token(token)
|
||||
if self._access_token:
|
||||
logger.debug("Configured default access token for SearchClient requests.")
|
||||
else:
|
||||
logger.debug("Cleared default access token for SearchClient requests.")
|
||||
|
||||
def _resolve_token(self, token: str | None) -> str | None:
|
||||
candidate = self._sanitize_token(token)
|
||||
if candidate is not None:
|
||||
return candidate
|
||||
return self._access_token
|
||||
|
||||
def _require_token(self, token: str | None) -> str:
|
||||
resolved = self._resolve_token(token)
|
||||
if resolved is None:
|
||||
raise ValueError(
|
||||
"Authenticated request requires an access token but none was provided."
|
||||
)
|
||||
return resolved
|
||||
|
||||
def _build_headers(
|
||||
self,
|
||||
*,
|
||||
token: str | None = None,
|
||||
accept: str = "application/json",
|
||||
include_json: bool = False,
|
||||
) -> dict[str, str]:
|
||||
headers: dict[str, str] = {"Accept": accept}
|
||||
resolved = self._resolve_token(token)
|
||||
if resolved:
|
||||
headers["Authorization"] = f"Bearer {resolved}"
|
||||
if include_json:
|
||||
headers["Content-Type"] = "application/json"
|
||||
return headers
|
||||
|
||||
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
|
||||
"""Call the global search endpoint and return the parsed JSON payload."""
|
||||
|
||||
client = self._get_client()
|
||||
logger.debug("Calling global search with keyword=%s", keyword)
|
||||
response = await client.get(
|
||||
"/api/search/global",
|
||||
params={"keyword": keyword},
|
||||
headers={"Accept": "application/json"},
|
||||
headers=self._build_headers(),
|
||||
)
|
||||
response.raise_for_status()
|
||||
payload = response.json()
|
||||
if not isinstance(payload, list):
|
||||
formatted = json.dumps(payload, ensure_ascii=False)[:200]
|
||||
raise ValueError(f"Unexpected response format from search endpoint: {formatted}")
|
||||
logger.info(
|
||||
"Global search returned %d results for keyword '%s'",
|
||||
len(payload),
|
||||
keyword,
|
||||
)
|
||||
return [self._ensure_dict(entry) for entry in payload]
|
||||
|
||||
async def reply_to_comment(
|
||||
@@ -47,33 +117,101 @@ class SearchClient:
|
||||
"""Reply to an existing comment and return the created reply."""
|
||||
|
||||
client = self._get_client()
|
||||
headers = {
|
||||
"Accept": "application/json",
|
||||
"Content-Type": "application/json",
|
||||
"Authorization": f"Bearer {token}",
|
||||
}
|
||||
resolved_token = self._require_token(token)
|
||||
headers = self._build_headers(token=resolved_token, include_json=True)
|
||||
payload: dict[str, Any] = {"content": content}
|
||||
if captcha is not None:
|
||||
stripped_captcha = captcha.strip()
|
||||
if stripped_captcha:
|
||||
payload["captcha"] = stripped_captcha
|
||||
|
||||
logger.debug(
|
||||
"Posting reply to comment_id=%s (captcha=%s)",
|
||||
comment_id,
|
||||
bool(captcha),
|
||||
)
|
||||
response = await client.post(
|
||||
f"/api/comments/{comment_id}/replies",
|
||||
json=payload,
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return self._ensure_dict(response.json())
|
||||
body = self._ensure_dict(response.json())
|
||||
logger.info("Reply to comment_id=%s succeeded with id=%s", comment_id, body.get("id"))
|
||||
return body
|
||||
|
||||
async def reply_to_post(
|
||||
self,
|
||||
post_id: int,
|
||||
token: str,
|
||||
content: str,
|
||||
captcha: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Create a comment on a post and return the backend payload."""
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
headers = self._build_headers(token=resolved_token, include_json=True)
|
||||
payload: dict[str, Any] = {"content": content}
|
||||
if captcha is not None:
|
||||
stripped_captcha = captcha.strip()
|
||||
if stripped_captcha:
|
||||
payload["captcha"] = stripped_captcha
|
||||
|
||||
logger.debug(
|
||||
"Posting comment to post_id=%s (captcha=%s)",
|
||||
post_id,
|
||||
bool(captcha),
|
||||
)
|
||||
response = await client.post(
|
||||
f"/api/posts/{post_id}/comments",
|
||||
json=payload,
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
body = self._ensure_dict(response.json())
|
||||
logger.info("Reply to post_id=%s succeeded with id=%s", post_id, body.get("id"))
|
||||
return body
|
||||
|
||||
async def create_post(
|
||||
self,
|
||||
payload: dict[str, Any],
|
||||
*,
|
||||
token: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Create a new post and return the detailed backend payload."""
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
headers = self._build_headers(token=resolved_token, include_json=True)
|
||||
|
||||
logger.debug(
|
||||
"Creating post with category_id=%s and %d tag(s)",
|
||||
payload.get("categoryId"),
|
||||
len(payload.get("tagIds", []) if isinstance(payload.get("tagIds"), list) else []),
|
||||
)
|
||||
response = await client.post(
|
||||
"/api/posts",
|
||||
json=payload,
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
body = self._ensure_dict(response.json())
|
||||
logger.info("Post creation succeeded with id=%s", body.get("id"))
|
||||
return body
|
||||
|
||||
async def recent_posts(self, minutes: int) -> list[dict[str, Any]]:
|
||||
"""Return posts created within the given timeframe."""
|
||||
|
||||
client = self._get_client()
|
||||
logger.debug(
|
||||
"Fetching recent posts within last %s minutes",
|
||||
minutes,
|
||||
)
|
||||
response = await client.get(
|
||||
"/api/posts/recent",
|
||||
params={"minutes": minutes},
|
||||
headers={"Accept": "application/json"},
|
||||
headers=self._build_headers(),
|
||||
)
|
||||
response.raise_for_status()
|
||||
payload = response.json()
|
||||
@@ -82,14 +220,120 @@ class SearchClient:
|
||||
raise ValueError(
|
||||
f"Unexpected response format from recent posts endpoint: {formatted}"
|
||||
)
|
||||
logger.info(
|
||||
"Fetched %d recent posts for window=%s minutes",
|
||||
len(payload),
|
||||
minutes,
|
||||
)
|
||||
return [self._ensure_dict(entry) for entry in payload]
|
||||
|
||||
async def get_post(self, post_id: int, token: str | None = None) -> dict[str, Any]:
|
||||
"""Retrieve the detailed payload for a single post."""
|
||||
|
||||
client = self._get_client()
|
||||
headers = self._build_headers(token=token)
|
||||
logger.debug("Fetching post details for post_id=%s", post_id)
|
||||
response = await client.get(f"/api/posts/{post_id}", headers=headers)
|
||||
response.raise_for_status()
|
||||
body = self._ensure_dict(response.json())
|
||||
logger.info(
|
||||
"Retrieved post_id=%s successfully with %d top-level comments",
|
||||
post_id,
|
||||
len(body.get("comments", []) if isinstance(body.get("comments"), list) else []),
|
||||
)
|
||||
return body
|
||||
|
||||
async def list_unread_notifications(
|
||||
self,
|
||||
*,
|
||||
page: int = 0,
|
||||
size: int = 30,
|
||||
token: str | None = None,
|
||||
) -> list[dict[str, Any]]:
|
||||
"""Return unread notifications for the authenticated user."""
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
logger.debug(
|
||||
"Fetching unread notifications with page=%s, size=%s",
|
||||
page,
|
||||
size,
|
||||
)
|
||||
response = await client.get(
|
||||
"/api/notifications/unread",
|
||||
params={"page": page, "size": size},
|
||||
headers=self._build_headers(token=resolved_token),
|
||||
)
|
||||
response.raise_for_status()
|
||||
payload = response.json()
|
||||
if not isinstance(payload, list):
|
||||
formatted = json.dumps(payload, ensure_ascii=False)[:200]
|
||||
raise ValueError(
|
||||
"Unexpected response format from unread notifications endpoint: "
|
||||
f"{formatted}"
|
||||
)
|
||||
logger.info(
|
||||
"Fetched %d unread notifications (page=%s, size=%s)",
|
||||
len(payload),
|
||||
page,
|
||||
size,
|
||||
)
|
||||
return [self._ensure_dict(entry) for entry in payload]
|
||||
|
||||
async def mark_notifications_read(
|
||||
self,
|
||||
ids: list[int],
|
||||
*,
|
||||
token: str | None = None,
|
||||
) -> None:
|
||||
"""Mark the provided notifications as read for the authenticated user."""
|
||||
|
||||
if not ids:
|
||||
raise ValueError(
|
||||
"At least one notification identifier must be provided to mark as read."
|
||||
)
|
||||
|
||||
sanitized_ids: list[int] = []
|
||||
for value in ids:
|
||||
if isinstance(value, bool):
|
||||
raise ValueError("Notification identifiers must be integers, not booleans.")
|
||||
try:
|
||||
converted = int(value)
|
||||
except (TypeError, ValueError) as exc: # pragma: no cover - defensive
|
||||
raise ValueError(
|
||||
"Notification identifiers must be integers."
|
||||
) from exc
|
||||
if converted <= 0:
|
||||
raise ValueError(
|
||||
"Notification identifiers must be positive integers."
|
||||
)
|
||||
sanitized_ids.append(converted)
|
||||
|
||||
client = self._get_client()
|
||||
resolved_token = self._require_token(token)
|
||||
logger.debug(
|
||||
"Marking %d notifications as read: ids=%s",
|
||||
len(sanitized_ids),
|
||||
sanitized_ids,
|
||||
)
|
||||
response = await client.post(
|
||||
"/api/notifications/read",
|
||||
json={"ids": sanitized_ids},
|
||||
headers=self._build_headers(token=resolved_token, include_json=True),
|
||||
)
|
||||
response.raise_for_status()
|
||||
logger.info(
|
||||
"Successfully marked %d notifications as read.",
|
||||
len(sanitized_ids),
|
||||
)
|
||||
|
||||
async def aclose(self) -> None:
|
||||
"""Dispose of the underlying HTTP client."""
|
||||
|
||||
if self._client is not None:
|
||||
await self._client.aclose()
|
||||
self._client = None
|
||||
logger.debug("Closed httpx.AsyncClient for SearchClient.")
|
||||
|
||||
@staticmethod
|
||||
def _ensure_dict(entry: Any) -> dict[str, Any]:
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import Annotated
|
||||
|
||||
@@ -12,8 +13,14 @@ from pydantic import Field as PydanticField
|
||||
|
||||
from .config import get_settings
|
||||
from .schemas import (
|
||||
CommentCreateResult,
|
||||
CommentData,
|
||||
CommentReplyResult,
|
||||
NotificationData,
|
||||
NotificationCleanupResult,
|
||||
UnreadNotificationsResponse,
|
||||
PostDetail,
|
||||
PostCreateResult,
|
||||
PostSummary,
|
||||
RecentPostsResponse,
|
||||
SearchResponse,
|
||||
@@ -22,8 +29,26 @@ from .schemas import (
|
||||
from .search_client import SearchClient
|
||||
|
||||
settings = get_settings()
|
||||
if not logging.getLogger().handlers:
|
||||
logging.basicConfig(
|
||||
level=getattr(logging, settings.log_level.upper(), logging.INFO),
|
||||
format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
|
||||
)
|
||||
else:
|
||||
logging.getLogger().setLevel(
|
||||
getattr(logging, settings.log_level.upper(), logging.INFO)
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
search_client = SearchClient(
|
||||
str(settings.backend_base_url), timeout=settings.request_timeout
|
||||
str(settings.backend_base_url),
|
||||
timeout=settings.request_timeout,
|
||||
access_token=(
|
||||
settings.access_token.get_secret_value()
|
||||
if settings.access_token is not None
|
||||
else None
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -32,16 +57,19 @@ async def lifespan(_: FastMCP):
|
||||
"""Lifecycle hook that disposes shared resources when the server stops."""
|
||||
|
||||
try:
|
||||
logger.debug("OpenIsle MCP server lifespan started.")
|
||||
yield
|
||||
finally:
|
||||
logger.debug("Disposing shared SearchClient instance.")
|
||||
await search_client.aclose()
|
||||
|
||||
|
||||
app = FastMCP(
|
||||
name="openisle-mcp",
|
||||
instructions=(
|
||||
"Use this server to search OpenIsle content, reply to comments with an authentication "
|
||||
"token, and list posts created within a recent time window."
|
||||
"Use this server to search OpenIsle content, create new posts, reply to posts and "
|
||||
"comments with an authentication token, retrieve details for a specific post, list "
|
||||
"posts created within a recent time window, and review unread notification messages."
|
||||
),
|
||||
host=settings.host,
|
||||
port=settings.port,
|
||||
@@ -65,6 +93,7 @@ async def search(
|
||||
raise ValueError("Keyword must not be empty.")
|
||||
|
||||
try:
|
||||
logger.info("Received search request for keyword='%s'", sanitized)
|
||||
raw_results = await search_client.global_search(sanitized)
|
||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
|
||||
message = (
|
||||
@@ -90,10 +119,124 @@ async def search(
|
||||
|
||||
if ctx is not None:
|
||||
await ctx.info(f"Search keyword '{sanitized}' returned {len(results)} results.")
|
||||
logger.debug(
|
||||
"Validated %d search results for keyword='%s'",
|
||||
len(results),
|
||||
sanitized,
|
||||
)
|
||||
|
||||
return SearchResponse(keyword=sanitized, total=len(results), results=results)
|
||||
|
||||
|
||||
@app.tool(
|
||||
name="reply_to_post",
|
||||
description="Create a comment on a post using an authentication token.",
|
||||
structured_output=True,
|
||||
)
|
||||
async def reply_to_post(
|
||||
post_id: Annotated[
|
||||
int,
|
||||
PydanticField(ge=1, description="Identifier of the post being replied to."),
|
||||
],
|
||||
content: Annotated[
|
||||
str,
|
||||
PydanticField(description="Markdown content of the reply."),
|
||||
],
|
||||
captcha: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Optional captcha solution if the backend requires it.",
|
||||
),
|
||||
] = None,
|
||||
token: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional JWT bearer token. When omitted the configured access token is used."
|
||||
),
|
||||
),
|
||||
] = None,
|
||||
ctx: Context | None = None,
|
||||
) -> CommentCreateResult:
|
||||
"""Create a comment on a post and return the backend payload."""
|
||||
|
||||
sanitized_content = content.strip()
|
||||
if not sanitized_content:
|
||||
raise ValueError("Reply content must not be empty.")
|
||||
|
||||
sanitized_token = token.strip() if isinstance(token, str) else None
|
||||
if sanitized_token == "":
|
||||
sanitized_token = None
|
||||
|
||||
sanitized_captcha = captcha.strip() if isinstance(captcha, str) else None
|
||||
|
||||
try:
|
||||
logger.info(
|
||||
"Creating reply for post_id=%s (captcha=%s)",
|
||||
post_id,
|
||||
bool(sanitized_captcha),
|
||||
)
|
||||
raw_comment = await search_client.reply_to_post(
|
||||
post_id,
|
||||
sanitized_token,
|
||||
sanitized_content,
|
||||
sanitized_captcha,
|
||||
)
|
||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
|
||||
status_code = exc.response.status_code
|
||||
if status_code == 401:
|
||||
message = (
|
||||
"Authentication failed while replying to post "
|
||||
f"{post_id}. Please verify the token."
|
||||
)
|
||||
elif status_code == 403:
|
||||
message = (
|
||||
"The provided token is not authorized to reply to post "
|
||||
f"{post_id}."
|
||||
)
|
||||
elif status_code == 404:
|
||||
message = f"Post {post_id} was not found."
|
||||
else:
|
||||
message = (
|
||||
"OpenIsle backend returned HTTP "
|
||||
f"{status_code} while replying to post {post_id}."
|
||||
)
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
except httpx.RequestError as exc: # pragma: no cover - network errors
|
||||
message = (
|
||||
"Unable to reach OpenIsle backend comment service: "
|
||||
f"{exc}."
|
||||
)
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
try:
|
||||
comment = CommentData.model_validate(raw_comment)
|
||||
except ValidationError as exc:
|
||||
message = "Received malformed data from the post comment endpoint."
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
if ctx is not None:
|
||||
await ctx.info(
|
||||
"Reply created successfully for post "
|
||||
f"{post_id}."
|
||||
)
|
||||
logger.debug(
|
||||
"Validated reply comment payload for post_id=%s (comment_id=%s)",
|
||||
post_id,
|
||||
comment.id,
|
||||
)
|
||||
|
||||
return CommentCreateResult(comment=comment)
|
||||
|
||||
|
||||
@app.tool(
|
||||
name="reply_to_comment",
|
||||
description="Reply to an existing comment using an authentication token.",
|
||||
@@ -104,7 +247,6 @@ async def reply_to_comment(
|
||||
int,
|
||||
PydanticField(ge=1, description="Identifier of the comment being replied to."),
|
||||
],
|
||||
token: Annotated[str, PydanticField(description="JWT bearer token for the user performing the reply.")],
|
||||
content: Annotated[
|
||||
str,
|
||||
PydanticField(description="Markdown content of the reply."),
|
||||
@@ -116,6 +258,15 @@ async def reply_to_comment(
|
||||
description="Optional captcha solution if the backend requires it.",
|
||||
),
|
||||
] = None,
|
||||
token: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional JWT bearer token. When omitted the configured access token is used."
|
||||
),
|
||||
),
|
||||
] = None,
|
||||
ctx: Context | None = None,
|
||||
) -> CommentReplyResult:
|
||||
"""Create a reply for a comment and return the backend payload."""
|
||||
@@ -124,13 +275,16 @@ async def reply_to_comment(
|
||||
if not sanitized_content:
|
||||
raise ValueError("Reply content must not be empty.")
|
||||
|
||||
sanitized_token = token.strip()
|
||||
if not sanitized_token:
|
||||
raise ValueError("Authentication token must not be empty.")
|
||||
sanitized_token = token.strip() if isinstance(token, str) else None
|
||||
|
||||
sanitized_captcha = captcha.strip() if isinstance(captcha, str) else None
|
||||
|
||||
try:
|
||||
logger.info(
|
||||
"Creating reply for comment_id=%s (captcha=%s)",
|
||||
comment_id,
|
||||
bool(sanitized_captcha),
|
||||
)
|
||||
raw_comment = await search_client.reply_to_comment(
|
||||
comment_id,
|
||||
sanitized_token,
|
||||
@@ -179,10 +333,352 @@ async def reply_to_comment(
|
||||
"Reply created successfully for comment "
|
||||
f"{comment_id}."
|
||||
)
|
||||
logger.debug(
|
||||
"Validated reply payload for comment_id=%s (reply_id=%s)",
|
||||
comment_id,
|
||||
comment.id,
|
||||
)
|
||||
|
||||
return CommentReplyResult(comment=comment)
|
||||
|
||||
|
||||
@app.tool(
|
||||
name="create_post",
|
||||
description="Publish a new post using an authentication token.",
|
||||
structured_output=True,
|
||||
)
|
||||
async def create_post(
|
||||
title: Annotated[
|
||||
str,
|
||||
PydanticField(description="Title of the post to be created."),
|
||||
],
|
||||
content: Annotated[
|
||||
str,
|
||||
PydanticField(description="Markdown content of the post."),
|
||||
],
|
||||
category_id: Annotated[
|
||||
int | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
ge=1,
|
||||
description="Optional category identifier for the post.",
|
||||
),
|
||||
] = None,
|
||||
tag_ids: Annotated[
|
||||
list[int] | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
min_length=1,
|
||||
description="Optional list of tag identifiers to assign to the post.",
|
||||
),
|
||||
] = None,
|
||||
post_type: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Optional post type value (e.g. LOTTERY, POLL).",
|
||||
),
|
||||
] = None,
|
||||
visible_scope: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Optional visibility scope for the post.",
|
||||
),
|
||||
] = None,
|
||||
prize_description: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Description of the prize for lottery posts.",
|
||||
),
|
||||
] = None,
|
||||
prize_icon: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Icon URL for the lottery prize.",
|
||||
),
|
||||
] = None,
|
||||
prize_count: Annotated[
|
||||
int | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
ge=1,
|
||||
description="Total number of prizes available for lottery posts.",
|
||||
),
|
||||
] = None,
|
||||
point_cost: Annotated[
|
||||
int | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
ge=0,
|
||||
description="Point cost required to participate in the post, when applicable.",
|
||||
),
|
||||
] = None,
|
||||
start_time: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="ISO 8601 start time for lottery or poll posts.",
|
||||
),
|
||||
] = None,
|
||||
end_time: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="ISO 8601 end time for lottery or poll posts.",
|
||||
),
|
||||
] = None,
|
||||
options: Annotated[
|
||||
list[str] | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
min_length=1,
|
||||
description="Poll options when creating a poll post.",
|
||||
),
|
||||
] = None,
|
||||
multiple: Annotated[
|
||||
bool | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Whether the poll allows selecting multiple options.",
|
||||
),
|
||||
] = None,
|
||||
proposed_name: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Proposed category name for suggestion posts.",
|
||||
),
|
||||
] = None,
|
||||
proposal_description: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Supporting description for the proposed category.",
|
||||
),
|
||||
] = None,
|
||||
captcha: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Captcha solution if the backend requires one to create posts.",
|
||||
),
|
||||
] = None,
|
||||
token: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional JWT bearer token. When omitted the configured access token is used."
|
||||
),
|
||||
),
|
||||
] = None,
|
||||
ctx: Context | None = None,
|
||||
) -> PostCreateResult:
|
||||
"""Create a new post in OpenIsle and return the detailed backend payload."""
|
||||
|
||||
sanitized_title = title.strip()
|
||||
if not sanitized_title:
|
||||
raise ValueError("Post title must not be empty.")
|
||||
|
||||
sanitized_content = content.strip()
|
||||
if not sanitized_content:
|
||||
raise ValueError("Post content must not be empty.")
|
||||
|
||||
sanitized_token = token.strip() if isinstance(token, str) else None
|
||||
if sanitized_token == "":
|
||||
sanitized_token = None
|
||||
|
||||
sanitized_category_id: int | None = None
|
||||
if category_id is not None:
|
||||
if isinstance(category_id, bool):
|
||||
raise ValueError("Category identifier must be an integer, not a boolean.")
|
||||
try:
|
||||
sanitized_category_id = int(category_id)
|
||||
except (TypeError, ValueError) as exc:
|
||||
raise ValueError("Category identifier must be an integer.") from exc
|
||||
if sanitized_category_id <= 0:
|
||||
raise ValueError("Category identifier must be a positive integer.")
|
||||
|
||||
sanitized_tag_ids: list[int] | None = None
|
||||
if tag_ids is not None:
|
||||
sanitized_tag_ids = []
|
||||
for value in tag_ids:
|
||||
if isinstance(value, bool):
|
||||
raise ValueError("Tag identifiers must be integers, not booleans.")
|
||||
try:
|
||||
converted = int(value)
|
||||
except (TypeError, ValueError) as exc:
|
||||
raise ValueError("Tag identifiers must be integers.") from exc
|
||||
if converted <= 0:
|
||||
raise ValueError("Tag identifiers must be positive integers.")
|
||||
sanitized_tag_ids.append(converted)
|
||||
if not sanitized_tag_ids:
|
||||
sanitized_tag_ids = None
|
||||
|
||||
sanitized_post_type = post_type.strip() if isinstance(post_type, str) else None
|
||||
if sanitized_post_type == "":
|
||||
sanitized_post_type = None
|
||||
|
||||
sanitized_visible_scope = (
|
||||
visible_scope.strip() if isinstance(visible_scope, str) else None
|
||||
)
|
||||
if sanitized_visible_scope == "":
|
||||
sanitized_visible_scope = None
|
||||
|
||||
sanitized_prize_description = (
|
||||
prize_description.strip() if isinstance(prize_description, str) else None
|
||||
)
|
||||
if sanitized_prize_description == "":
|
||||
sanitized_prize_description = None
|
||||
|
||||
sanitized_prize_icon = prize_icon.strip() if isinstance(prize_icon, str) else None
|
||||
if sanitized_prize_icon == "":
|
||||
sanitized_prize_icon = None
|
||||
|
||||
sanitized_prize_count: int | None = None
|
||||
if prize_count is not None:
|
||||
if isinstance(prize_count, bool):
|
||||
raise ValueError("Prize count must be an integer, not a boolean.")
|
||||
try:
|
||||
sanitized_prize_count = int(prize_count)
|
||||
except (TypeError, ValueError) as exc:
|
||||
raise ValueError("Prize count must be an integer.") from exc
|
||||
if sanitized_prize_count <= 0:
|
||||
raise ValueError("Prize count must be a positive integer.")
|
||||
|
||||
sanitized_point_cost: int | None = None
|
||||
if point_cost is not None:
|
||||
if isinstance(point_cost, bool):
|
||||
raise ValueError("Point cost must be an integer, not a boolean.")
|
||||
try:
|
||||
sanitized_point_cost = int(point_cost)
|
||||
except (TypeError, ValueError) as exc:
|
||||
raise ValueError("Point cost must be an integer.") from exc
|
||||
if sanitized_point_cost < 0:
|
||||
raise ValueError("Point cost cannot be negative.")
|
||||
|
||||
sanitized_start_time = start_time.strip() if isinstance(start_time, str) else None
|
||||
if sanitized_start_time == "":
|
||||
sanitized_start_time = None
|
||||
|
||||
sanitized_end_time = end_time.strip() if isinstance(end_time, str) else None
|
||||
if sanitized_end_time == "":
|
||||
sanitized_end_time = None
|
||||
|
||||
sanitized_options: list[str] | None = None
|
||||
if options is not None:
|
||||
sanitized_options = []
|
||||
for option in options:
|
||||
if option is None:
|
||||
continue
|
||||
stripped_option = option.strip()
|
||||
if stripped_option:
|
||||
sanitized_options.append(stripped_option)
|
||||
if not sanitized_options:
|
||||
sanitized_options = None
|
||||
|
||||
sanitized_multiple = bool(multiple) if isinstance(multiple, bool) else None
|
||||
|
||||
sanitized_proposed_name = (
|
||||
proposed_name.strip() if isinstance(proposed_name, str) else None
|
||||
)
|
||||
if sanitized_proposed_name == "":
|
||||
sanitized_proposed_name = None
|
||||
|
||||
sanitized_proposal_description = (
|
||||
proposal_description.strip() if isinstance(proposal_description, str) else None
|
||||
)
|
||||
if sanitized_proposal_description == "":
|
||||
sanitized_proposal_description = None
|
||||
|
||||
sanitized_captcha = captcha.strip() if isinstance(captcha, str) else None
|
||||
if sanitized_captcha == "":
|
||||
sanitized_captcha = None
|
||||
|
||||
payload: dict[str, object] = {
|
||||
"title": sanitized_title,
|
||||
"content": sanitized_content,
|
||||
}
|
||||
if sanitized_category_id is not None:
|
||||
payload["categoryId"] = sanitized_category_id
|
||||
if sanitized_tag_ids is not None:
|
||||
payload["tagIds"] = sanitized_tag_ids
|
||||
if sanitized_post_type is not None:
|
||||
payload["type"] = sanitized_post_type
|
||||
if sanitized_visible_scope is not None:
|
||||
payload["postVisibleScopeType"] = sanitized_visible_scope
|
||||
if sanitized_prize_description is not None:
|
||||
payload["prizeDescription"] = sanitized_prize_description
|
||||
if sanitized_prize_icon is not None:
|
||||
payload["prizeIcon"] = sanitized_prize_icon
|
||||
if sanitized_prize_count is not None:
|
||||
payload["prizeCount"] = sanitized_prize_count
|
||||
if sanitized_point_cost is not None:
|
||||
payload["pointCost"] = sanitized_point_cost
|
||||
if sanitized_start_time is not None:
|
||||
payload["startTime"] = sanitized_start_time
|
||||
if sanitized_end_time is not None:
|
||||
payload["endTime"] = sanitized_end_time
|
||||
if sanitized_options is not None:
|
||||
payload["options"] = sanitized_options
|
||||
if sanitized_multiple is not None:
|
||||
payload["multiple"] = sanitized_multiple
|
||||
if sanitized_proposed_name is not None:
|
||||
payload["proposedName"] = sanitized_proposed_name
|
||||
if sanitized_proposal_description is not None:
|
||||
payload["proposalDescription"] = sanitized_proposal_description
|
||||
if sanitized_captcha is not None:
|
||||
payload["captcha"] = sanitized_captcha
|
||||
|
||||
try:
|
||||
logger.info("Creating post with title='%s'", sanitized_title)
|
||||
raw_post = await search_client.create_post(payload, token=sanitized_token)
|
||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
|
||||
status_code = exc.response.status_code
|
||||
if status_code == 400:
|
||||
message = (
|
||||
"Post creation failed due to invalid input or captcha verification errors."
|
||||
)
|
||||
elif status_code == 401:
|
||||
message = "Authentication failed while creating the post. Please verify the token."
|
||||
elif status_code == 403:
|
||||
message = "The provided token is not authorized to create posts."
|
||||
else:
|
||||
message = (
|
||||
"OpenIsle backend returned HTTP "
|
||||
f"{status_code} while creating the post."
|
||||
)
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
except httpx.RequestError as exc: # pragma: no cover - network errors
|
||||
message = f"Unable to reach OpenIsle backend post service: {exc}."
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
try:
|
||||
post = PostDetail.model_validate(raw_post)
|
||||
except ValidationError as exc:
|
||||
message = "Received malformed data from the post creation endpoint."
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
if ctx is not None:
|
||||
await ctx.info(f"Post '{post.title}' created successfully.")
|
||||
logger.debug(
|
||||
"Validated created post payload with id=%s and title='%s'",
|
||||
post.id,
|
||||
post.title,
|
||||
)
|
||||
|
||||
return PostCreateResult(post=post)
|
||||
|
||||
|
||||
@app.tool(
|
||||
name="recent_posts",
|
||||
description="Retrieve posts created in the last N minutes.",
|
||||
@@ -198,6 +694,7 @@ async def recent_posts(
|
||||
"""Fetch recent posts from the backend and return structured data."""
|
||||
|
||||
try:
|
||||
logger.info("Fetching recent posts for last %s minutes", minutes)
|
||||
raw_posts = await search_client.recent_posts(minutes)
|
||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
|
||||
message = (
|
||||
@@ -225,10 +722,250 @@ async def recent_posts(
|
||||
await ctx.info(
|
||||
f"Found {len(posts)} posts created within the last {minutes} minutes."
|
||||
)
|
||||
logger.debug(
|
||||
"Validated %d recent posts for window=%s minutes",
|
||||
len(posts),
|
||||
minutes,
|
||||
)
|
||||
|
||||
return RecentPostsResponse(minutes=minutes, total=len(posts), posts=posts)
|
||||
|
||||
|
||||
@app.tool(
|
||||
name="get_post",
|
||||
description="Retrieve detailed information for a single post.",
|
||||
structured_output=True,
|
||||
)
|
||||
async def get_post(
|
||||
post_id: Annotated[
|
||||
int,
|
||||
PydanticField(ge=1, description="Identifier of the post to retrieve."),
|
||||
],
|
||||
token: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description="Optional JWT bearer token to view the post as an authenticated user.",
|
||||
),
|
||||
] = None,
|
||||
ctx: Context | None = None,
|
||||
) -> PostDetail:
|
||||
"""Fetch post details from the backend and validate the response."""
|
||||
|
||||
sanitized_token = token.strip() if isinstance(token, str) else None
|
||||
if sanitized_token == "":
|
||||
sanitized_token = None
|
||||
|
||||
try:
|
||||
logger.info("Fetching post details for post_id=%s", post_id)
|
||||
raw_post = await search_client.get_post(post_id, sanitized_token)
|
||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
|
||||
status_code = exc.response.status_code
|
||||
if status_code == 404:
|
||||
message = f"Post {post_id} was not found."
|
||||
elif status_code == 401:
|
||||
message = "Authentication failed while retrieving the post."
|
||||
elif status_code == 403:
|
||||
message = "The provided token is not authorized to view this post."
|
||||
else:
|
||||
message = (
|
||||
"OpenIsle backend returned HTTP "
|
||||
f"{status_code} while retrieving post {post_id}."
|
||||
)
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
except httpx.RequestError as exc: # pragma: no cover - network errors
|
||||
message = f"Unable to reach OpenIsle backend post service: {exc}."
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
try:
|
||||
post = PostDetail.model_validate(raw_post)
|
||||
except ValidationError as exc:
|
||||
message = "Received malformed data from the post detail endpoint."
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
if ctx is not None:
|
||||
await ctx.info(f"Retrieved post {post_id} successfully.")
|
||||
logger.debug(
|
||||
"Validated post payload for post_id=%s with %d comments",
|
||||
post_id,
|
||||
len(post.comments),
|
||||
)
|
||||
|
||||
return post
|
||||
|
||||
|
||||
@app.tool(
|
||||
name="list_unread_messages",
|
||||
description="List unread notification messages for the authenticated user.",
|
||||
structured_output=True,
|
||||
)
|
||||
async def list_unread_messages(
|
||||
page: Annotated[
|
||||
int,
|
||||
PydanticField(
|
||||
default=0,
|
||||
ge=0,
|
||||
description="Page number of unread notifications to retrieve.",
|
||||
),
|
||||
] = 0,
|
||||
size: Annotated[
|
||||
int,
|
||||
PydanticField(
|
||||
default=30,
|
||||
ge=1,
|
||||
le=100,
|
||||
description="Number of unread notifications to include per page.",
|
||||
),
|
||||
] = 30,
|
||||
token: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional JWT bearer token. When omitted the configured access token is used."
|
||||
),
|
||||
),
|
||||
] = None,
|
||||
ctx: Context | None = None,
|
||||
) -> UnreadNotificationsResponse:
|
||||
"""Retrieve unread notifications and return structured data."""
|
||||
|
||||
sanitized_token = token.strip() if isinstance(token, str) else None
|
||||
|
||||
try:
|
||||
logger.info(
|
||||
"Fetching unread notifications (page=%s, size=%s)",
|
||||
page,
|
||||
size,
|
||||
)
|
||||
raw_notifications = await search_client.list_unread_notifications(
|
||||
page=page,
|
||||
size=size,
|
||||
token=sanitized_token,
|
||||
)
|
||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
|
||||
message = (
|
||||
"OpenIsle backend returned HTTP "
|
||||
f"{exc.response.status_code} while fetching unread notifications."
|
||||
)
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
except httpx.RequestError as exc: # pragma: no cover - network errors
|
||||
message = f"Unable to reach OpenIsle backend notification service: {exc}."
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
try:
|
||||
notifications = [
|
||||
NotificationData.model_validate(entry) for entry in raw_notifications
|
||||
]
|
||||
except ValidationError as exc:
|
||||
message = "Received malformed data from the unread notifications endpoint."
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
total = len(notifications)
|
||||
if ctx is not None:
|
||||
await ctx.info(
|
||||
f"Retrieved {total} unread notifications (page {page}, size {size})."
|
||||
)
|
||||
logger.debug(
|
||||
"Validated %d unread notifications for page=%s size=%s",
|
||||
total,
|
||||
page,
|
||||
size,
|
||||
)
|
||||
|
||||
return UnreadNotificationsResponse(
|
||||
page=page,
|
||||
size=size,
|
||||
total=total,
|
||||
notifications=notifications,
|
||||
)
|
||||
|
||||
|
||||
@app.tool(
|
||||
name="mark_notifications_read",
|
||||
description="Mark specific notification messages as read to remove them from the unread list.",
|
||||
structured_output=True,
|
||||
)
|
||||
async def mark_notifications_read(
|
||||
ids: Annotated[
|
||||
list[int],
|
||||
PydanticField(
|
||||
min_length=1,
|
||||
description="Notification identifiers that should be marked as read.",
|
||||
),
|
||||
],
|
||||
token: Annotated[
|
||||
str | None,
|
||||
PydanticField(
|
||||
default=None,
|
||||
description=(
|
||||
"Optional JWT bearer token. When omitted the configured access token is used."
|
||||
),
|
||||
),
|
||||
] = None,
|
||||
ctx: Context | None = None,
|
||||
) -> NotificationCleanupResult:
|
||||
"""Mark the supplied notifications as read and report the processed identifiers."""
|
||||
|
||||
sanitized_token = token.strip() if isinstance(token, str) else None
|
||||
if sanitized_token == "":
|
||||
sanitized_token = None
|
||||
|
||||
try:
|
||||
logger.info(
|
||||
"Marking %d notifications as read", # pragma: no branch - logging
|
||||
len(ids),
|
||||
)
|
||||
await search_client.mark_notifications_read(ids, token=sanitized_token)
|
||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
|
||||
message = (
|
||||
"OpenIsle backend returned HTTP "
|
||||
f"{exc.response.status_code} while marking notifications as read."
|
||||
)
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
except httpx.RequestError as exc: # pragma: no cover - network errors
|
||||
message = f"Unable to reach OpenIsle backend notification service: {exc}."
|
||||
if ctx is not None:
|
||||
await ctx.error(message)
|
||||
raise ValueError(message) from exc
|
||||
|
||||
processed_ids: list[int] = []
|
||||
for value in ids:
|
||||
if isinstance(value, bool):
|
||||
raise ValueError("Notification identifiers must be integers, not booleans.")
|
||||
converted = int(value)
|
||||
if converted <= 0:
|
||||
raise ValueError("Notification identifiers must be positive integers.")
|
||||
processed_ids.append(converted)
|
||||
if ctx is not None:
|
||||
await ctx.info(
|
||||
f"Marked {len(processed_ids)} notifications as read.",
|
||||
)
|
||||
logger.debug(
|
||||
"Successfully marked notifications as read: ids=%s",
|
||||
processed_ids,
|
||||
)
|
||||
|
||||
return NotificationCleanupResult(
|
||||
processed_ids=processed_ids,
|
||||
total_marked=len(processed_ids),
|
||||
)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""Run the MCP server using the configured transport."""
|
||||
|
||||
|
||||
Reference in New Issue
Block a user