Mirror of https://github.com/nagisa77/OpenIsle.git
Synced 2026-02-07 15:41:02 +08:00

Compare commits (12 commits): codex/add-… ... feature/bo…
| Author | SHA1 | Date |
|---|---|---|
|  | 87677f5968 |  |
|  | fd93a2dc61 |  |
|  | 80f862a226 |  |
|  | 26bb85f4d4 |  |
|  | 398b4b482f |  |
|  | 2cfb302981 |  |
|  | e75bd76b71 |  |
|  | 99c3ac1837 |  |
|  | 749ab560ff |  |
|  | 541ad4d149 |  |
|  | 03eb027ea4 |  |
|  | 4194b2be91 |  |

.github/workflows/reply-bots.yml (vendored, Normal file, 28 lines)
@@ -0,0 +1,28 @@
name: Reply Bots

on:
  schedule:
    - cron: "*/30 * * * *"
  workflow_dispatch:

jobs:
  run-reply-bot:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "20"
          cache: "npm"

      - name: Install dependencies
        run: npm install --no-save @openai/agents ts-node typescript

      - name: Run reply bot
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          OPENISLE_TOKEN: ${{ secrets.OPENISLE_TOKEN }}
        run: npx ts-node --esm bots/reply_bots.ts

bots/reply_bots.ts (Normal file, 131 lines)
@@ -0,0 +1,131 @@
// reply_bot.ts
import { Agent, Runner, hostedMcpTool, withTrace } from "@openai/agents";

console.log("✅ Reply bot starting...");

const allowedMcpTools = [
  "search",
  "reply_to_post",
  "reply_to_comment",
  "recent_posts",
  "get_post",
  "list_unread_messages",
  "mark_notifications_read",
];

console.log("🛠️ Configured Hosted MCP tools:", allowedMcpTools.join(", "));

// ---- MCP tools (Hosted MCP) ----
// Key point: requireApproval is set to "never" so runs do not stall waiting for manual approval.
const mcp = hostedMcpTool({
  serverLabel: "openisle_mcp",
  serverUrl: "https://www.open-isle.com/mcp",
  allowedTools: allowedMcpTools,
  requireApproval: "never",
});

type WorkflowInput = { input_as_text: string };

// Read the site auth token from the environment (optional).
const OPENISLE_TOKEN = process.env.OPENISLE_TOKEN ?? "";

console.log(
  OPENISLE_TOKEN
    ? "🔑 OPENISLE_TOKEN detected in environment."
    : "🔓 OPENISLE_TOKEN not set; agent will request it if required."
);

// ---- Define the Agent ----
const openisleBot = new Agent({
  name: "OpenIsle Bot",
  instructions: [
    "You are a helpful and cute assistant for https://www.open-isle.com. Please use plenty of kawaii kaomoji, such as (๑˃ᴗ˂)ﻭ, (•̀ω•́)✧, (。•ᴗ-)_♡, (⁎⁍̴̛ᴗ⁍̴̛⁎), etc., in your replies to create a friendly, adorable vibe.",
    "Finish tasks end-to-end before replying. If multiple MCP tools are needed, call them sequentially until the task is truly done.",
    "When presenting the result, reply in Chinese with a concise, cute summary filled with kaomoji and include any important URLs or IDs.",
    OPENISLE_TOKEN
      ? `If tools require auth, use this token exactly where the tool schema expects it: ${OPENISLE_TOKEN}`
      : "If a tool requires auth, ask me to provide OPENISLE_TOKEN via env.",
    "After finishing replies, call mark_notifications_read with all processed notification IDs to keep the inbox clean.",
  ].join("\n"),
  tools: [mcp],
  model: "gpt-4o",
  modelSettings: {
    temperature: 0.7,
    topP: 1,
    maxTokens: 2048,
    toolChoice: "auto",
    store: true,
  },
});

// ---- Entry point: run until finalOutput is produced, then print it and exit ----
export const runWorkflow = async (workflow: WorkflowInput) => {
  // Strongly recommended: set OPENAI_API_KEY externally (in the shell).
  if (!process.env.OPENAI_API_KEY) {
    throw new Error("Missing OPENAI_API_KEY");
  }

  const runner = new Runner({
    workflowName: "OpenIsle Bot",
    traceMetadata: {
      __trace_source__: "agent-builder",
      workflow_id: "wf_69003cbd47e08190928745d3c806c0b50d1a01cfae052be8",
    },
    // To disable trace reporting entirely, add: tracingDisabled: true
  });

  return await withTrace("OpenIsle Bot run", async () => {
    const preview = workflow.input_as_text.trim();
    console.log(
      "📝 Received workflow input (preview):",
      preview.length > 200 ? `${preview.slice(0, 200)}…` : preview
    );

    // Runner.run loops automatically: LLM → tools → ... until a finalOutput is produced.
    console.log("🚦 Starting agent run with maxTurns=16...");
    const result = await runner.run(openisleBot, workflow.input_as_text, {
      maxTurns: 16, // allow more complex tasks to make several rounds of MCP calls
      // stream: true // enable to watch events while the run progresses, then consume the stream
    });

    console.log("📬 Agent run completed. Result keys:", Object.keys(result));

    if (!result.finalOutput) {
      // No final output usually means manual approval was enabled, a tool failed, or maxTurns was reached.
      throw new Error("Agent result is undefined (no final output).");
    }

    const openisleBotResult = { output_text: String(result.finalOutput) };

    console.log(
      "🤖 Agent result (length=%d):\n%s",
      openisleBotResult.output_text.length,
      openisleBotResult.output_text
    );
    return openisleBotResult;
  });
};

// ---- CLI run (example) ----
if (require.main === module) {
  (async () => {
    try {
      const query = `
【AUTO】No confirmation needed; automatically handle all unread mentions and comments:
1) Call list_unread_messages;
2) Handle each mention/comment in turn: fetch context with get_post if needed and draft a concise reply in Chinese; use reply_to_comment when a commentId is present, otherwise use reply_to_post;
3) Skip follow and system events;
4) Stay idempotent: if the last comment on the post is already your own reply, skip it;
5) Call mark_notifications_read with the notification IDs handled in this run to clear them;
6) Handle at most the 10 most recent items; when done, output only a brief summary (including URLs or IDs).
`;

      console.log("🔍 Running workflow...");
      await runWorkflow({ input_as_text: query });
      process.exit(0);
    } catch (err: any) {
      console.error("❌ Agent failed:", err?.stack || err);
      process.exit(1);
    }
  })();
}
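
Because runWorkflow is exported, the bot can also be driven from another script instead of the built-in CLI query. A minimal sketch of such a caller (hypothetical file, not part of this commit; the relative import path and the prompt text are assumptions):

// hypothetical_caller.ts: illustration only, not part of this change.
// Assumes OPENAI_API_KEY is already set in the environment, as runWorkflow requires.
import { runWorkflow } from "./reply_bots";

async function main(): Promise<void> {
  // Any free-form instruction works; the agent decides which MCP tools to call.
  const { output_text } = await runWorkflow({
    input_as_text: "List my unread mentions and summarize them; do not reply to anything.",
  });
  console.log(output_text);
}

main().catch((err) => {
  console.error("Caller failed:", err);
  process.exit(1);
});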
@@ -5,7 +5,7 @@ from __future__ import annotations
 from datetime import datetime
 from typing import Any, Optional
 
-from pydantic import BaseModel, Field, ConfigDict
+from pydantic import BaseModel, Field, ConfigDict, field_validator
 
 
 class SearchResultItem(BaseModel):
@@ -170,6 +170,15 @@ class CommentData(BaseModel):
 
     model_config = ConfigDict(populate_by_name=True, extra="allow")
 
+    @field_validator("replies", "reactions", mode="before")
+    @classmethod
+    def _ensure_comment_lists(cls, value: Any) -> list[Any]:
+        """Convert ``None`` payloads to empty lists for comment collections."""
+
+        if value is None:
+            return []
+        return value
+
 
 class CommentReplyResult(BaseModel):
     """Structured response returned when replying to a comment."""
@@ -253,6 +262,15 @@ class PostSummary(BaseModel):
 
     model_config = ConfigDict(populate_by_name=True, extra="allow")
 
+    @field_validator("tags", "reactions", "participants", mode="before")
+    @classmethod
+    def _ensure_post_lists(cls, value: Any) -> list[Any]:
+        """Normalize ``None`` values returned by the backend to empty lists."""
+
+        if value is None:
+            return []
+        return value
+
 
 class RecentPostsResponse(BaseModel):
     """Structured response for the recent posts tool."""
@@ -278,6 +296,15 @@ class PostDetail(PostSummary):
 
     model_config = ConfigDict(populate_by_name=True, extra="allow")
 
+    @field_validator("comments", mode="before")
+    @classmethod
+    def _ensure_comments_list(cls, value: Any) -> list[Any]:
+        """Treat ``None`` comments payloads as empty lists."""
+
+        if value is None:
+            return []
+        return value
+
 
 class NotificationData(BaseModel):
     """Unread notification payload returned by the backend."""
@@ -331,3 +358,15 @@ class UnreadNotificationsResponse(BaseModel):
         default_factory=list,
         description="Unread notifications returned by the backend.",
     )
+
+
+class NotificationCleanupResult(BaseModel):
+    """Structured response returned after marking notifications as read."""
+
+    processed_ids: list[int] = Field(
+        default_factory=list,
+        description="Identifiers that were marked as read in the backend.",
+    )
+    total_marked: int = Field(
+        description="Total number of notifications successfully marked as read.",
+    )
@@ -253,6 +253,53 @@ class SearchClient:
         )
         return [self._ensure_dict(entry) for entry in payload]
 
+    async def mark_notifications_read(
+        self,
+        ids: list[int],
+        *,
+        token: str | None = None,
+    ) -> None:
+        """Mark the provided notifications as read for the authenticated user."""
+
+        if not ids:
+            raise ValueError(
+                "At least one notification identifier must be provided to mark as read."
+            )
+
+        sanitized_ids: list[int] = []
+        for value in ids:
+            if isinstance(value, bool):
+                raise ValueError("Notification identifiers must be integers, not booleans.")
+            try:
+                converted = int(value)
+            except (TypeError, ValueError) as exc:  # pragma: no cover - defensive
+                raise ValueError(
+                    "Notification identifiers must be integers."
+                ) from exc
+            if converted <= 0:
+                raise ValueError(
+                    "Notification identifiers must be positive integers."
+                )
+            sanitized_ids.append(converted)
+
+        client = self._get_client()
+        resolved_token = self._require_token(token)
+        logger.debug(
+            "Marking %d notifications as read: ids=%s",
+            len(sanitized_ids),
+            sanitized_ids,
+        )
+        response = await client.post(
+            "/api/notifications/read",
+            json={"ids": sanitized_ids},
+            headers=self._build_headers(token=resolved_token, include_json=True),
+        )
+        response.raise_for_status()
+        logger.info(
+            "Successfully marked %d notifications as read.",
+            len(sanitized_ids),
+        )
+
     async def aclose(self) -> None:
         """Dispose of the underlying HTTP client."""
 
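For reference, the new SearchClient.mark_notifications_read helper boils down to a single authenticated POST to /api/notifications/read with the sanitized IDs as the JSON body. A rough standalone equivalent in TypeScript (the backend base URL and the Bearer header format are assumptions here, since the real client resolves them through its configuration and _build_headers):

// Hypothetical standalone equivalent of SearchClient.mark_notifications_read; illustration only.
// Assumed: the backend is served from https://www.open-isle.com and expects a Bearer token.
async function markNotificationsRead(ids: number[], token: string): Promise<void> {
  const response = await fetch("https://www.open-isle.com/api/notifications/read", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({ ids }),
  });
  if (!response.ok) {
    // Mirrors response.raise_for_status() in the Python client.
    throw new Error(`Marking notifications as read failed with HTTP ${response.status}`);
  }
}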
@@ -4,13 +4,12 @@ from __future__ import annotations
 
 import logging
 from contextlib import asynccontextmanager
-from typing import Annotated, Any
+from typing import Annotated
 
 import httpx
 from mcp.server.fastmcp import Context, FastMCP
 from pydantic import ValidationError
 from pydantic import Field as PydanticField
-from weakref import WeakKeyDictionary
 
 from .config import get_settings
 from .schemas import (
@@ -18,6 +17,7 @@ from .schemas import (
     CommentData,
     CommentReplyResult,
     NotificationData,
+    NotificationCleanupResult,
     UnreadNotificationsResponse,
     PostDetail,
     PostSummary,
@@ -51,69 +51,6 @@ search_client = SearchClient(
 )
 
 
-class SessionTokenManager:
-    """Cache JWT access tokens on a per-session basis."""
-
-    def __init__(self) -> None:
-        self._tokens: WeakKeyDictionary[Any, str] = WeakKeyDictionary()
-
-    def resolve(
-        self, ctx: Context | None, token: str | None = None
-    ) -> str | None:
-        """Resolve and optionally persist the token for the current session."""
-
-        session = self._get_session(ctx)
-
-        if isinstance(token, str):
-            stripped = token.strip()
-            if stripped:
-                if session is not None:
-                    self._tokens[session] = stripped
-                    logger.debug(
-                        "Stored JWT token for session %s.",
-                        self._describe_session(session),
-                    )
-                return stripped
-
-            if session is not None and session in self._tokens:
-                logger.debug(
-                    "Clearing stored JWT token for session %s due to empty input.",
-                    self._describe_session(session),
-                )
-                del self._tokens[session]
-            return None
-
-        if session is not None:
-            cached = self._tokens.get(session)
-            if cached:
-                logger.debug(
-                    "Reusing cached JWT token for session %s.",
-                    self._describe_session(session),
-                )
-                return cached
-
-        return None
-
-    @staticmethod
-    def _get_session(ctx: Context | None) -> Any | None:
-        if ctx is None:
-            return None
-        try:
-            return ctx.session
-        except Exception:  # pragma: no cover - defensive guard
-            return None
-
-    @staticmethod
-    def _describe_session(session: Any) -> str:
-        identifier = getattr(session, "mcp_session_id", None)
-        if isinstance(identifier, str) and identifier:
-            return identifier
-        return hex(id(session))
-
-
-session_token_manager = SessionTokenManager()
-
-
 @asynccontextmanager
 async def lifespan(_: FastMCP):
     """Lifecycle hook that disposes shared resources when the server stops."""
@@ -129,8 +66,8 @@ async def lifespan(_: FastMCP):
 app = FastMCP(
     name="openisle-mcp",
     instructions=(
-        "Use this server to search OpenIsle content, reply to posts and comments with "
-        "session-managed authentication, retrieve details for a specific post, list posts created "
+        "Use this server to search OpenIsle content, reply to posts and comments with an "
+        "authentication token, retrieve details for a specific post, list posts created "
        "within a recent time window, and review unread notification messages."
     ),
     host=settings.host,
@@ -139,25 +76,6 @@ app = FastMCP(
 )
 
 
-@app.tool(
-    name="set_token",
-    description=(
-        "Set JWT token for the current session to be reused by other tools."
-    ),
-)
-async def set_token(
-    token: Annotated[
-        str,
-        PydanticField(description="JWT token string."),
-    ],
-    ctx: Context | None = None,
-) -> str:
-    """Persist a JWT token for the active MCP session."""
-
-    session_token_manager.resolve(ctx, token)
-    return "Token stored successfully."
-
-
 @app.tool(
     name="search",
     description="Perform a global search across OpenIsle resources.",
@@ -211,7 +129,7 @@ async def search(
 
 @app.tool(
     name="reply_to_post",
-    description="Create a comment on a post using session authentication.",
+    description="Create a comment on a post using an authentication token.",
     structured_output=True,
 )
 async def reply_to_post(
@@ -230,6 +148,15 @@ async def reply_to_post(
             description="Optional captcha solution if the backend requires it.",
         ),
     ] = None,
+    token: Annotated[
+        str | None,
+        PydanticField(
+            default=None,
+            description=(
+                "Optional JWT bearer token. When omitted the configured access token is used."
+            ),
+        ),
+    ] = None,
     ctx: Context | None = None,
 ) -> CommentCreateResult:
     """Create a comment on a post and return the backend payload."""
@@ -238,9 +165,11 @@ async def reply_to_post(
     if not sanitized_content:
         raise ValueError("Reply content must not be empty.")
 
-    sanitized_captcha = captcha.strip() if isinstance(captcha, str) else None
+    sanitized_token = token.strip() if isinstance(token, str) else None
+    if sanitized_token == "":
+        sanitized_token = None
 
-    resolved_token = session_token_manager.resolve(ctx)
+    sanitized_captcha = captcha.strip() if isinstance(captcha, str) else None
 
     try:
         logger.info(
@@ -250,7 +179,7 @@ async def reply_to_post(
         )
         raw_comment = await search_client.reply_to_post(
             post_id,
-            resolved_token,
+            sanitized_token,
             sanitized_content,
             sanitized_captcha,
         )
@@ -309,7 +238,7 @@ async def reply_to_post(
 
 @app.tool(
     name="reply_to_comment",
-    description="Reply to an existing comment using session authentication.",
+    description="Reply to an existing comment using an authentication token.",
     structured_output=True,
 )
 async def reply_to_comment(
@@ -328,6 +257,15 @@ async def reply_to_comment(
             description="Optional captcha solution if the backend requires it.",
         ),
     ] = None,
+    token: Annotated[
+        str | None,
+        PydanticField(
+            default=None,
+            description=(
+                "Optional JWT bearer token. When omitted the configured access token is used."
+            ),
+        ),
+    ] = None,
     ctx: Context | None = None,
 ) -> CommentReplyResult:
     """Create a reply for a comment and return the backend payload."""
@@ -336,9 +274,9 @@ async def reply_to_comment(
     if not sanitized_content:
         raise ValueError("Reply content must not be empty.")
 
-    sanitized_captcha = captcha.strip() if isinstance(captcha, str) else None
+    sanitized_token = token.strip() if isinstance(token, str) else None
 
-    resolved_token = session_token_manager.resolve(ctx)
+    sanitized_captcha = captcha.strip() if isinstance(captcha, str) else None
 
     try:
         logger.info(
@@ -348,7 +286,7 @@ async def reply_to_comment(
         )
         raw_comment = await search_client.reply_to_comment(
             comment_id,
-            resolved_token,
+            sanitized_token,
             sanitized_content,
             sanitized_captcha,
         )
@@ -465,15 +403,24 @@ async def get_post(
         int,
         PydanticField(ge=1, description="Identifier of the post to retrieve."),
     ],
+    token: Annotated[
+        str | None,
+        PydanticField(
+            default=None,
+            description="Optional JWT bearer token to view the post as an authenticated user.",
+        ),
+    ] = None,
     ctx: Context | None = None,
 ) -> PostDetail:
     """Fetch post details from the backend and validate the response."""
 
-    resolved_token = session_token_manager.resolve(ctx)
+    sanitized_token = token.strip() if isinstance(token, str) else None
+    if sanitized_token == "":
+        sanitized_token = None
 
     try:
         logger.info("Fetching post details for post_id=%s", post_id)
-        raw_post = await search_client.get_post(post_id, resolved_token)
+        raw_post = await search_client.get_post(post_id, sanitized_token)
     except httpx.HTTPStatusError as exc:  # pragma: no cover - network errors
         status_code = exc.response.status_code
         if status_code == 404:
@@ -538,11 +485,20 @@ async def list_unread_messages(
             description="Number of unread notifications to include per page.",
         ),
     ] = 30,
+    token: Annotated[
+        str | None,
+        PydanticField(
+            default=None,
+            description=(
+                "Optional JWT bearer token. When omitted the configured access token is used."
+            ),
+        ),
+    ] = None,
     ctx: Context | None = None,
 ) -> UnreadNotificationsResponse:
     """Retrieve unread notifications and return structured data."""
 
-    resolved_token = session_token_manager.resolve(ctx)
+    sanitized_token = token.strip() if isinstance(token, str) else None
 
     try:
         logger.info(
@@ -553,7 +509,7 @@ async def list_unread_messages(
         raw_notifications = await search_client.list_unread_notifications(
             page=page,
             size=size,
-            token=resolved_token,
+            token=sanitized_token,
         )
     except httpx.HTTPStatusError as exc:  # pragma: no cover - network errors
         message = (
@@ -599,6 +555,79 @@ async def list_unread_messages(
     )
 
 
+@app.tool(
+    name="mark_notifications_read",
+    description="Mark specific notification messages as read to remove them from the unread list.",
+    structured_output=True,
+)
+async def mark_notifications_read(
+    ids: Annotated[
+        list[int],
+        PydanticField(
+            min_length=1,
+            description="Notification identifiers that should be marked as read.",
+        ),
+    ],
+    token: Annotated[
+        str | None,
+        PydanticField(
+            default=None,
+            description=(
+                "Optional JWT bearer token. When omitted the configured access token is used."
+            ),
+        ),
+    ] = None,
+    ctx: Context | None = None,
+) -> NotificationCleanupResult:
+    """Mark the supplied notifications as read and report the processed identifiers."""
+
+    sanitized_token = token.strip() if isinstance(token, str) else None
+    if sanitized_token == "":
+        sanitized_token = None
+
+    try:
+        logger.info(
+            "Marking %d notifications as read",  # pragma: no branch - logging
+            len(ids),
+        )
+        await search_client.mark_notifications_read(ids, token=sanitized_token)
+    except httpx.HTTPStatusError as exc:  # pragma: no cover - network errors
+        message = (
+            "OpenIsle backend returned HTTP "
+            f"{exc.response.status_code} while marking notifications as read."
+        )
+        if ctx is not None:
+            await ctx.error(message)
+        raise ValueError(message) from exc
+    except httpx.RequestError as exc:  # pragma: no cover - network errors
+        message = f"Unable to reach OpenIsle backend notification service: {exc}."
+        if ctx is not None:
+            await ctx.error(message)
+        raise ValueError(message) from exc
+
+    processed_ids: list[int] = []
+    for value in ids:
+        if isinstance(value, bool):
+            raise ValueError("Notification identifiers must be integers, not booleans.")
+        converted = int(value)
+        if converted <= 0:
+            raise ValueError("Notification identifiers must be positive integers.")
+        processed_ids.append(converted)
+    if ctx is not None:
+        await ctx.info(
+            f"Marked {len(processed_ids)} notifications as read.",
+        )
+    logger.debug(
+        "Successfully marked notifications as read: ids=%s",
+        processed_ids,
+    )
+
+    return NotificationCleanupResult(
+        processed_ids=processed_ids,
+        total_marked=len(processed_ids),
+    )
+
+
 def main() -> None:
     """Run the MCP server using the configured transport."""
 