Mirror of https://github.com/nagisa77/OpenIsle.git (synced 2026-02-21 06:20:59 +08:00)

Compare commits: codex/crea... → codex/crea
1 commit: bcd6a3249d
@@ -7,6 +7,15 @@ REDIS_PORT=6379
RABBITMQ_PORT=5672
RABBITMQ_MANAGEMENT_PORT=15672

# === MCP Server ===
OPENISLE_MCP_TRANSPORT=http
OPENISLE_MCP_HOST=0.0.0.0
OPENISLE_MCP_PORT=8974
OPENISLE_API_BASE_URL=http://springboot:8080
OPENISLE_API_TIMEOUT=10
OPENISLE_MCP_DEFAULT_LIMIT=20
OPENISLE_MCP_SNIPPET_LENGTH=160

# === OpenSearch Configuration ===
OPENSEARCH_PORT=9200
OPENSEARCH_METRICS_PORT=9600
.gitignore (vendored, 1 change)
@@ -17,7 +17,6 @@ dist

# misc
.DS_Store
__pycache__/
*.pem
npm-debug.log*
yarn-debug.log*
README.md
@@ -28,6 +28,7 @@ OpenIsle is a full-stack open-source community platform built with Spring Boot and Vue 3
- Image uploads are supported, with Tencent Cloud COS used as the default storage extension
- Default avatars come from DiceBear Avatars; the theme and size can be customised via the `AVATAR_STYLE` and `AVATAR_SIZE` environment variables
- Browser push notifications, so reminders arrive even after leaving the site
- New: a Python MCP search service that lets AI assistants retrieve community content through a unified protocol

## 🌟 Project Advantages
docker-compose.yaml
@@ -178,6 +178,34 @@ services:
      - dev
      - prod

  mcp-server:
    build:
      context: ../mcp
      dockerfile: Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
    env_file:
      - ${ENV_FILE:-../.env}
    environment:
      OPENISLE_API_BASE_URL: ${OPENISLE_API_BASE_URL:-http://springboot:8080}
      OPENISLE_API_TIMEOUT: ${OPENISLE_API_TIMEOUT:-10}
      OPENISLE_MCP_DEFAULT_LIMIT: ${OPENISLE_MCP_DEFAULT_LIMIT:-20}
      OPENISLE_MCP_SNIPPET_LENGTH: ${OPENISLE_MCP_SNIPPET_LENGTH:-160}
      OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-http}
      OPENISLE_MCP_HOST: 0.0.0.0
      OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8974}
    ports:
      - "${OPENISLE_MCP_PORT:-8974}:${OPENISLE_MCP_PORT:-8974}"
    depends_on:
      springboot:
        condition: service_started
    restart: unless-stopped
    networks:
      - openisle-network
    profiles:
      - dev
      - dev_local_backend
      - prod

  websocket-service:
    image: maven:3.9-eclipse-temurin-17
    container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket

@@ -213,32 +241,6 @@ services:
      - dev_local_backend
      - prod

  mcp-service:
    build:
      context: ..
      dockerfile: mcp/Dockerfile
    container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
    env_file:
      - ${ENV_FILE:-../.env}
    environment:
      FASTMCP_HOST: 0.0.0.0
      FASTMCP_PORT: ${MCP_PORT:-8765}
      OPENISLE_BACKEND_URL: ${OPENISLE_BACKEND_URL:-http://springboot:8080}
      OPENISLE_BACKEND_TIMEOUT: ${OPENISLE_BACKEND_TIMEOUT:-10}
      OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-sse}
      OPENISLE_MCP_SSE_MOUNT_PATH: ${OPENISLE_MCP_SSE_MOUNT_PATH:-/mcp}
    ports:
      - "${MCP_PORT:-8765}:${MCP_PORT:-8765}"
    depends_on:
      springboot:
        condition: service_healthy
    restart: unless-stopped
    networks:
      - openisle-network
    profiles:
      - dev
      - prod

  frontend_dev:
    image: node:20
    container_name: ${COMPOSE_PROJECT_NAME}-openisle-frontend-dev
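The first hunk above adds the new `mcp-server` service (built from `../mcp`), while the second removes the previous `mcp-service` definition. As a rough sketch (not part of the commit), the new service could be started on its own with one of the declared profiles; the working directory (wherever this compose file lives) is an assumption:

```bash
# Start only the MCP server plus its dependency (springboot), using the dev profile.
docker compose --profile dev up -d mcp-server

# Follow its logs to confirm the HTTP transport is listening on ${OPENISLE_MCP_PORT:-8974}.
docker compose logs -f mcp-server
```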
mcp/Dockerfile
@@ -1,17 +1,27 @@
FROM python:3.11-slim AS runtime
# syntax=docker/dockerfile:1

ENV PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1
FROM python:3.11-slim AS base

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

WORKDIR /app

COPY mcp/pyproject.toml /app/pyproject.toml
COPY mcp/README.md /app/README.md
COPY mcp/src /app/src
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    && rm -rf /var/lib/apt/lists/*

COPY pyproject.toml README.md ./
COPY src ./src

RUN pip install --upgrade pip \
    && pip install .
    && pip install --no-cache-dir . \
    && pip cache purge

EXPOSE 8765
ENV OPENISLE_MCP_TRANSPORT=http \
    OPENISLE_MCP_HOST=0.0.0.0 \
    OPENISLE_MCP_PORT=8974

CMD ["openisle-mcp"]
EXPOSE 8974

ENTRYPOINT ["openisle-mcp"]
mcp/README.md
@@ -1,14 +1,25 @@
# OpenIsle MCP Server

This package provides a [Model Context Protocol](https://github.com/modelcontextprotocol) (MCP) server that exposes the OpenIsle
search capabilities to AI assistants. The server wraps the existing Spring Boot backend and currently provides a single `search`
tool. Future iterations can extend the server with additional functionality such as publishing new posts or moderating content.
This package provides a Python implementation of a Model Context Protocol (MCP) server for OpenIsle. The server focuses on the community search APIs so that AI assistants and other MCP-aware clients can discover OpenIsle users, posts, categories, comments, and tags. Additional capabilities such as content creation tools can be layered on later without changing the transport or deployment model.

## Features

- 🔍 **Global search** — delegates to the existing `/api/search/global` endpoint exposed by the OpenIsle backend.
- 🧠 **Structured results** — responses include highlights and deep links so AI clients can present the results cleanly.
- ⚙️ **Configurable** — point the server at any reachable OpenIsle backend by setting environment variables.
- ✅ Implements the MCP tooling interface using [FastMCP](https://github.com/modelcontextprotocol/fastmcp).
- 🔍 Exposes a `search` tool that proxies requests to the existing OpenIsle REST endpoints and normalises the response payload.
- ⚙️ Configurable through environment variables for API base URL, timeout, result limits, and snippet size.
- 🐳 Packaged with a Docker image so it can be launched alongside the other OpenIsle services.

## Environment variables

| Variable | Default | Description |
| --- | --- | --- |
| `OPENISLE_API_BASE_URL` | `http://springboot:8080` | Base URL of the OpenIsle backend REST API. |
| `OPENISLE_API_TIMEOUT` | `10` | Timeout (in seconds) used when calling the backend search endpoints. |
| `OPENISLE_MCP_DEFAULT_LIMIT` | `20` | Default maximum number of search results to return when the caller does not provide a limit. Use `0` or a negative number to disable limiting. |
| `OPENISLE_MCP_SNIPPET_LENGTH` | `160` | Maximum length (in characters) of the normalised summary snippet returned by the MCP tool. |
| `OPENISLE_MCP_TRANSPORT` | `stdio` | Transport used when running the server directly. Set to `http` when running inside Docker. |
| `OPENISLE_MCP_HOST` | `127.0.0.1` | Bind host used when the transport is HTTP/SSE. |
| `OPENISLE_MCP_PORT` | `8974` | Bind port used when the transport is HTTP/SSE. |

## Local development

@@ -16,24 +27,25 @@ tool. Future iterations can extend the server with additional functionality such
cd mcp
python -m venv .venv
source .venv/bin/activate
pip install --upgrade pip
pip install -e .
openisle-mcp --transport stdio # or "sse"/"streamable-http"
OPENISLE_API_BASE_URL=http://localhost:8080 OPENISLE_MCP_TRANSPORT=http openisle-mcp
```

Environment variables:
By default the server listens over stdio, which is useful when integrating with MCP-aware IDEs. When the `OPENISLE_MCP_TRANSPORT` variable is set to `http` the server will expose an HTTP transport on `OPENISLE_MCP_HOST:OPENISLE_MCP_PORT`.
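For stdio integrations, the installed `openisle-mcp` command can be registered directly with an MCP-aware client. A minimal sketch, assuming a Claude-Desktop-style `mcpServers` configuration (the entry name `openisle` and the backend URL are illustrative):

```json
{
  "mcpServers": {
    "openisle": {
      "command": "openisle-mcp",
      "env": {
        "OPENISLE_MCP_TRANSPORT": "stdio",
        "OPENISLE_API_BASE_URL": "http://localhost:8080"
      }
    }
  }
}
```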
| Variable | Description | Default |
| --- | --- | --- |
| `OPENISLE_BACKEND_URL` | Base URL of the Spring Boot backend | `http://springboot:8080` |
| `OPENISLE_BACKEND_TIMEOUT` | Timeout (seconds) for backend HTTP calls | `10` |
| `OPENISLE_PUBLIC_BASE_URL` | Optional base URL used to build deep links in search results | *(unset)* |
| `OPENISLE_MCP_TRANSPORT` | MCP transport (`stdio`, `sse`, `streamable-http`) | `stdio` |
| `OPENISLE_MCP_SSE_MOUNT_PATH` | Mount path when using SSE transport | `/mcp` |
| `FASTMCP_HOST` | Host for SSE / HTTP transports | `127.0.0.1` |
| `FASTMCP_PORT` | Port for SSE / HTTP transports | `8000` |
## Docker image

## Docker
The accompanying `Dockerfile` builds a minimal image that installs the package and starts the MCP server. The root Docker Compose manifest is configured to launch this service and connect it to the same internal network as the Spring Boot API so the MCP tools can reach the search endpoints.

A dedicated Docker image is provided and wired into `docker-compose.yaml`. The container listens on
`${MCP_PORT:-8765}` and connects to the backend service running in the same compose stack.
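As a rough sketch (not part of this change), the updated image can also be built and run on its own; the backend URL below is an assumption for a locally running API:

```bash
# Build from the mcp/ directory; the new Dockerfile copies pyproject.toml, README.md and src/ from there.
docker build -t openisle-mcp ./mcp

# The image defaults to the HTTP transport on port 8974 and starts via the openisle-mcp entrypoint.
docker run --rm -p 8974:8974 \
  -e OPENISLE_API_BASE_URL=http://host.docker.internal:8080 \
  openisle-mcp
```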
## MCP tool contract

The `search` tool accepts the following arguments:

- `keyword` (string, required): Search phrase passed directly to the OpenIsle API.
- `scope` (string, optional): One of `global`, `posts`, `posts_content`, `posts_title`, or `users`. Defaults to `global`.
- `limit` (integer, optional): Overrides the default limit from `OPENISLE_MCP_DEFAULT_LIMIT`.

The tool returns a JSON object containing both the raw API response and a normalised representation with concise titles, subtitles, and snippets for each result.
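For illustration only, a trimmed `global` search response might look roughly like the following; the field names follow the `SearchResponse` and `NormalizedSearchResult` models introduced in this change, while the values are invented:

```json
{
  "keyword": "welcome",
  "scope": "global",
  "endpoint": "http://springboot:8080/api/search/global",
  "limit": 20,
  "total_results": 3,
  "returned_results": 3,
  "normalized": [
    {
      "type": "post",
      "id": 42,
      "title": "Welcome to OpenIsle",
      "subtitle": "Announcements",
      "snippet": "Say hello and introduce yourself to the community…",
      "metadata": {"postId": 42}
    }
  ],
  "raw": ["… original objects returned by /api/search/global …"],
  "transport": {"scope": "global", "endpoint": "http://springboot:8080/api/search/global"}
}
```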
Future tools (for example posting or moderation helpers) can be added within this package and exposed via additional decorators without changing the deployment setup.
mcp/pyproject.toml
@@ -1,29 +1,30 @@
[build-system]
requires = ["setuptools>=68", "wheel"]
build-backend = "setuptools.build_meta"
requires = ["hatchling>=1.25.0"]
build-backend = "hatchling.build"

[project]
name = "openisle-mcp"
version = "0.1.0"
description = "Model Context Protocol server exposing OpenIsle search capabilities"
description = "Model Context Protocol server exposing OpenIsle search functionality."
readme = "README.md"
authors = [{name = "OpenIsle Team"}]
license = {text = "MIT"}
authors = [{name = "OpenIsle Contributors"}]
requires-python = ">=3.11"
dependencies = [
    "mcp>=1.19.0",
    "httpx>=0.28.0",
    "pydantic>=2.12.0",
    "fastmcp>=2.12.5",
    "httpx>=0.28.1",
    "pydantic>=2.7",
]

[project.scripts]
openisle-mcp = "openisle_mcp.server:main"

[tool.setuptools]
package-dir = {"" = "src"}
[tool.hatch.build.targets.wheel]
packages = ["src/openisle_mcp"]

[tool.setuptools.packages.find]
where = ["src"]

[tool.setuptools.package-data]
openisle_mcp = ["py.typed"]
[tool.hatch.build.targets.sdist]
include = [
    "src/openisle_mcp",
    "README.md",
    "pyproject.toml",
]
mcp/src/openisle_mcp/__init__.py
@@ -1,10 +1,5 @@
"""OpenIsle MCP server package."""

from importlib import metadata
from .server import main

try:
    __version__ = metadata.version("openisle-mcp")
except metadata.PackageNotFoundError: # pragma: no cover - best effort during dev
    __version__ = "0.0.0"

__all__ = ["__version__"]
__all__ = ["main"]
mcp/src/openisle_mcp/client.py
@@ -1,79 +1,218 @@
"""HTTP client for talking to the OpenIsle backend."""
"""HTTP client wrappers for interacting with the OpenIsle backend."""

from __future__ import annotations

import json
import logging
from typing import List
import html
import re
from typing import Any, Iterable

import httpx
from pydantic import ValidationError

from .models import BackendSearchResult
from .models import NormalizedSearchResult, SearchResponse, SearchScope
from .settings import Settings

__all__ = ["BackendClientError", "OpenIsleBackendClient"]

logger = logging.getLogger(__name__)
_TAG_RE = re.compile(r"<[^>]+>")
_WHITESPACE_RE = re.compile(r"\s+")


class BackendClientError(RuntimeError):
    """Raised when the backend cannot fulfil a request."""
class SearchClient:
    """High level client around the OpenIsle search API."""

    _ENDPOINTS: dict[SearchScope, str] = {
        SearchScope.GLOBAL: "/api/search/global",
        SearchScope.POSTS: "/api/search/posts",
        SearchScope.POSTS_CONTENT: "/api/search/posts/content",
        SearchScope.POSTS_TITLE: "/api/search/posts/title",
        SearchScope.USERS: "/api/search/users",
    }

    def __init__(self, settings: Settings) -> None:
        self._base_url = settings.sanitized_base_url()
        self._timeout = settings.request_timeout
        self._default_limit = settings.default_limit
        self._snippet_length = settings.snippet_length
        self._client = httpx.AsyncClient(
            base_url=self._base_url,
            timeout=self._timeout,
            headers={"Accept": "application/json"},
        )

    async def aclose(self) -> None:
        await self._client.aclose()

    def endpoint_path(self, scope: SearchScope) -> str:
        return self._ENDPOINTS[scope]

    def endpoint_url(self, scope: SearchScope) -> str:
        return f"{self._base_url}{self.endpoint_path(scope)}"

    async def search(
        self,
        keyword: str,
        scope: SearchScope,
        *,
        limit: int | None = None,
    ) -> SearchResponse:
        """Execute a search request and normalise the results."""

        keyword = keyword.strip()
        effective_limit = self._resolve_limit(limit)

        if not keyword:
            return SearchResponse(
                keyword=keyword,
                scope=scope,
                endpoint=self.endpoint_url(scope),
                limit=effective_limit,
                total_results=0,
                returned_results=0,
                normalized=[],
                raw=[],
            )

        response = await self._client.get(
            self.endpoint_path(scope),
            params={"keyword": keyword},
        )
        response.raise_for_status()
        payload = response.json()
        if not isinstance(payload, list): # pragma: no cover - defensive programming
            raise ValueError("Search API did not return a JSON array")

        total_results = len(payload)
        items = payload if effective_limit is None else payload[:effective_limit]
        normalized = [self._normalise_item(scope, item) for item in items]

        return SearchResponse(
            keyword=keyword,
            scope=scope,
            endpoint=self.endpoint_url(scope),
            limit=effective_limit,
            total_results=total_results,
            returned_results=len(items),
            normalized=normalized,
            raw=items,
        )

    def _resolve_limit(self, requested: int | None) -> int | None:
        value = requested if requested is not None else self._default_limit
        if value is None:
            return None
        if value <= 0:
            return None
        return value

    def _normalise_item(
        self,
        scope: SearchScope,
        item: Any,
    ) -> NormalizedSearchResult:
        """Normalise raw API objects into a consistent structure."""

        if not isinstance(item, dict): # pragma: no cover - defensive programming
            return NormalizedSearchResult(type=scope.value, metadata={"raw": item})

        if scope == SearchScope.GLOBAL:
            return self._normalise_global(item)
        if scope in {SearchScope.POSTS, SearchScope.POSTS_CONTENT, SearchScope.POSTS_TITLE}:
            return self._normalise_post(item)
        if scope == SearchScope.USERS:
            return self._normalise_user(item)
        return NormalizedSearchResult(type=scope.value, metadata=item)

    def _normalise_global(self, item: dict[str, Any]) -> NormalizedSearchResult:
        highlights = {
            "title": item.get("highlightedText"),
            "subtitle": item.get("highlightedSubText"),
            "snippet": item.get("highlightedExtra"),
        }
        snippet_source = highlights.get("snippet") or item.get("extra")
        metadata = {
            "postId": item.get("postId"),
            "highlights": {k: v for k, v in highlights.items() if v},
        }
        return NormalizedSearchResult(
            type=str(item.get("type", "result")),
            id=_safe_int(item.get("id")),
            title=highlights.get("title") or _safe_str(item.get("text")),
            subtitle=highlights.get("subtitle") or _safe_str(item.get("subText")),
            snippet=self._snippet(snippet_source),
            metadata={k: v for k, v in metadata.items() if v not in (None, {}, [])},
        )

    def _normalise_post(self, item: dict[str, Any]) -> NormalizedSearchResult:
        author = _safe_dict(item.get("author"))
        category = _safe_dict(item.get("category"))
        tags = [tag.get("name") for tag in _safe_iter(item.get("tags")) if isinstance(tag, dict)]
        metadata = {
            "author": author.get("username"),
            "category": category.get("name"),
            "tags": tags,
            "views": item.get("views"),
            "commentCount": item.get("commentCount"),
            "status": item.get("status"),
            "apiUrl": f"{self._base_url}/api/posts/{item.get('id')}" if item.get("id") else None,
        }
        return NormalizedSearchResult(
            type="post",
            id=_safe_int(item.get("id")),
            title=_safe_str(item.get("title")),
            subtitle=_safe_str(category.get("name")),
            snippet=self._snippet(item.get("content")),
            metadata={k: v for k, v in metadata.items() if v not in (None, [], {})},
        )

    def _normalise_user(self, item: dict[str, Any]) -> NormalizedSearchResult:
        metadata = {
            "followers": item.get("followers"),
            "following": item.get("following"),
            "totalViews": item.get("totalViews"),
            "role": item.get("role"),
            "subscribed": item.get("subscribed"),
            "apiUrl": f"{self._base_url}/api/users/{item.get('id')}" if item.get("id") else None,
        }
        return NormalizedSearchResult(
            type="user",
            id=_safe_int(item.get("id")),
            title=_safe_str(item.get("username")),
            subtitle=_safe_str(item.get("email") or item.get("role")),
            snippet=self._snippet(item.get("introduction")),
            metadata={k: v for k, v in metadata.items() if v not in (None, [], {})},
        )

    def _snippet(self, value: Any) -> str | None:
        text = _safe_str(value)
        if not text:
            return None
        text = html.unescape(text)
        text = _TAG_RE.sub(" ", text)
        text = _WHITESPACE_RE.sub(" ", text).strip()
        if not text:
            return None
        if len(text) <= self._snippet_length:
            return text
        return text[: self._snippet_length - 1].rstrip() + "…"


class OpenIsleBackendClient:
    """Tiny wrapper around the Spring Boot search endpoints."""

    def __init__(self, base_url: str, timeout: float = 10.0) -> None:
        if not base_url:
            raise ValueError("base_url must not be empty")
        self._base_url = base_url.rstrip("/")
        timeout = timeout if timeout > 0 else 10.0
        self._timeout = httpx.Timeout(timeout, connect=timeout, read=timeout)

    @property
    def base_url(self) -> str:
        return self._base_url

    async def search_global(self, keyword: str) -> List[BackendSearchResult]:
        """Call `/api/search/global` and normalise the payload."""

        url = f"{self._base_url}/api/search/global"
        params = {"keyword": keyword}
        headers = {"Accept": "application/json"}
        logger.debug("Calling OpenIsle backend", extra={"url": url, "params": params})

        try:
            async with httpx.AsyncClient(timeout=self._timeout, headers=headers, follow_redirects=True) as client:
                response = await client.get(url, params=params)
                response.raise_for_status()
        except httpx.HTTPStatusError as exc: # pragma: no cover - network errors are rare in tests
            body_preview = _truncate_body(exc.response.text)
            raise BackendClientError(
                f"Backend returned HTTP {exc.response.status_code}: {body_preview}"
            ) from exc
        except httpx.RequestError as exc: # pragma: no cover - network errors are rare in tests
            raise BackendClientError(f"Failed to reach backend: {exc}") from exc

        try:
            payload = response.json()
        except json.JSONDecodeError as exc:
            raise BackendClientError("Backend returned invalid JSON") from exc

        if not isinstance(payload, list):
            raise BackendClientError("Unexpected search payload type; expected a list")

        results: list[BackendSearchResult] = []
        for item in payload:
            try:
                results.append(BackendSearchResult.model_validate(item))
            except ValidationError as exc:
                raise BackendClientError(f"Invalid search result payload: {exc}") from exc

        return results
def _safe_int(value: Any) -> int | None:
    try:
        return int(value)
    except (TypeError, ValueError): # pragma: no cover - defensive
        return None


def _truncate_body(body: str, limit: int = 200) -> str:
    body = body.strip()
    if len(body) <= limit:
        return body
    return f"{body[:limit]}…"
def _safe_str(value: Any) -> str | None:
    if value is None:
        return None
    text = str(value).strip()
    return text or None


def _safe_dict(value: Any) -> dict[str, Any]:
    return value if isinstance(value, dict) else {}


def _safe_iter(value: Any) -> Iterable[Any]:
    if isinstance(value, list | tuple | set):
        return value
    return []
mcp/src/openisle_mcp/models.py
@@ -1,58 +1,71 @@
"""Pydantic models used by the OpenIsle MCP server."""
"""Shared models for the OpenIsle MCP server."""

from __future__ import annotations

from typing import Dict, Optional
from enum import Enum
from typing import Any

from pydantic import BaseModel, ConfigDict, Field

__all__ = [
    "BackendSearchResult",
    "SearchResult",
    "SearchResponse",
]
from pydantic import BaseModel, Field


class BackendSearchResult(BaseModel):
    """Shape of the payload returned by the OpenIsle backend."""
class SearchScope(str, Enum):
    """Supported search endpoints."""

    type: str
    id: Optional[int] = None
    text: Optional[str] = None
    sub_text: Optional[str] = Field(default=None, alias="subText")
    extra: Optional[str] = None
    post_id: Optional[int] = Field(default=None, alias="postId")
    highlighted_text: Optional[str] = Field(default=None, alias="highlightedText")
    highlighted_sub_text: Optional[str] = Field(default=None, alias="highlightedSubText")
    highlighted_extra: Optional[str] = Field(default=None, alias="highlightedExtra")
    GLOBAL = "global"
    POSTS = "posts"
    POSTS_CONTENT = "posts_content"
    POSTS_TITLE = "posts_title"
    USERS = "users"

    model_config = ConfigDict(populate_by_name=True, extra="ignore")
    def __str__(self) -> str: # pragma: no cover - convenience for logging
        return self.value


class SearchResult(BaseModel):
    """Structured search result returned to MCP clients."""
class NormalizedSearchResult(BaseModel):
    """Compact structure returned by the MCP search tool."""

    type: str = Field(description="Entity type, e.g. post, comment, user")
    id: Optional[int] = Field(default=None, description="Primary identifier for the entity")
    title: Optional[str] = Field(default=None, description="Primary text to display")
    subtitle: Optional[str] = Field(default=None, description="Secondary text (e.g. author or category)")
    extra: Optional[str] = Field(default=None, description="Additional descriptive snippet")
    post_id: Optional[int] = Field(default=None, description="Associated post id for comment results")
    url: Optional[str] = Field(default=None, description="Deep link to the resource inside OpenIsle")
    highlights: Dict[str, Optional[str]] = Field(
    type: str = Field(description="Entity type, e.g. user, post, comment.")
    id: int | None = Field(default=None, description="Primary identifier of the entity.")
    title: str | None = Field(default=None, description="Display title for the result.")
    subtitle: str | None = Field(default=None, description="Secondary line of context.")
    snippet: str | None = Field(default=None, description="Short summary of the result.")
    metadata: dict[str, Any] = Field(
        default_factory=dict,
        description="Highlighted HTML fragments keyed by field name",
        description="Additional attributes extracted from the API response.",
    )

    model_config = ConfigDict(populate_by_name=True)
    model_config = {
        "extra": "ignore",
    }


class SearchResponse(BaseModel):
    """Response envelope returned from the MCP search tool."""
    """Payload returned to MCP clients."""

    keyword: str = Field(description="Sanitised keyword that was searched for")
    total_results: int = Field(description="Total number of results returned by the backend")
    limit: int = Field(description="Maximum number of results included in the response")
    results: list[SearchResult] = Field(default_factory=list, description="Search results up to the requested limit")
    keyword: str
    scope: SearchScope
    endpoint: str
    limit: int | None = Field(
        default=None,
        description="Result limit applied to the request. None means unlimited.",
    )
    total_results: int = Field(
        default=0,
        description="Total number of items returned by the OpenIsle API before limiting.",
    )
    returned_results: int = Field(
        default=0,
        description="Number of items returned to the MCP client after limiting.",
    )
    normalized: list[NormalizedSearchResult] = Field(
        default_factory=list,
        description="Normalised representation of each search hit.",
    )
    raw: list[Any] = Field(
        default_factory=list,
        description="Raw response objects from the OpenIsle REST API.",
    )

    model_config = ConfigDict(populate_by_name=True)
    model_config = {
        "extra": "ignore",
    }
mcp/src/openisle_mcp/server.py
@@ -1,164 +1,95 @@
"""Entry point for the OpenIsle MCP server."""
"""Entrypoint for the OpenIsle MCP server."""

from __future__ import annotations

import argparse
import logging
import os
from typing import Annotated, Optional
from contextlib import asynccontextmanager
from typing import Any

from mcp.server.fastmcp import Context, FastMCP
from mcp.server.fastmcp import exceptions as mcp_exceptions
from pydantic import Field
import httpx
from fastmcp import Context, FastMCP

from .client import BackendClientError, OpenIsleBackendClient
from .models import BackendSearchResult, SearchResponse, SearchResult
from .client import SearchClient
from .models import SearchResponse, SearchScope
from .settings import Settings

logger = logging.getLogger(__name__)
__all__ = ["main"]

APP_NAME = "openisle-mcp"
DEFAULT_BACKEND_URL = "http://springboot:8080"
DEFAULT_TRANSPORT = "stdio"
DEFAULT_TIMEOUT = 10.0
DEFAULT_LIMIT = 20
MAX_LIMIT = 50

server = FastMCP(
    APP_NAME,
def _create_lifespan(settings: Settings):
    @asynccontextmanager
    async def lifespan(app: FastMCP):
        client = SearchClient(settings)
        setattr(app, "_search_client", client)
        try:
            yield {"client": client}
        finally:
            await client.aclose()
            if hasattr(app, "_search_client"):
                delattr(app, "_search_client")

    return lifespan


_settings = Settings.from_env()

mcp = FastMCP(
    name="OpenIsle Search",
    version="0.1.0",
    instructions=(
        "Use the `search` tool to query OpenIsle content. "
        "Results include posts, comments, users, categories, and tags."
        "Provides access to OpenIsle search endpoints for retrieving users, posts, "
        "comments, tags, and categories."
    ),
    lifespan=_create_lifespan(_settings),
)


def _env(name: str, default: Optional[str] = None) -> Optional[str]:
    value = os.getenv(name, default)
    if value is None:
        return None
    trimmed = value.strip()
    return trimmed or default
@mcp.tool("search")
async def search(
    keyword: str,
    scope: SearchScope = SearchScope.GLOBAL,
    limit: int | None = None,
    ctx: Context | None = None,
) -> dict[str, Any]:
    """Perform a search against the OpenIsle backend."""


def _load_timeout() -> float:
    raw = _env("OPENISLE_BACKEND_TIMEOUT", str(DEFAULT_TIMEOUT))
    client = _resolve_client(ctx)
    try:
        timeout = float(raw) if raw is not None else DEFAULT_TIMEOUT
    except ValueError:
        logger.warning("Invalid OPENISLE_BACKEND_TIMEOUT value '%s', falling back to %s", raw, DEFAULT_TIMEOUT)
        return DEFAULT_TIMEOUT
    if timeout <= 0:
        logger.warning("Non-positive OPENISLE_BACKEND_TIMEOUT %s, falling back to %s", timeout, DEFAULT_TIMEOUT)
        return DEFAULT_TIMEOUT
    return timeout
        response: SearchResponse = await client.search(keyword=keyword, scope=scope, limit=limit)
    except httpx.HTTPError as exc:
        message = f"OpenIsle search request failed: {exc}".rstrip()
        raise RuntimeError(message) from exc


_BACKEND_CLIENT = OpenIsleBackendClient(
    base_url=_env("OPENISLE_BACKEND_URL", DEFAULT_BACKEND_URL) or DEFAULT_BACKEND_URL,
    timeout=_load_timeout(),
)
_PUBLIC_BASE_URL = _env("OPENISLE_PUBLIC_BASE_URL")


def _build_url(result: BackendSearchResult) -> Optional[str]:
    if not _PUBLIC_BASE_URL:
        return None
    base = _PUBLIC_BASE_URL.rstrip("/")
    if result.type in {"post", "post_title"} and result.id is not None:
        return f"{base}/posts/{result.id}"
    if result.type == "comment" and result.post_id is not None:
        anchor = f"#comment-{result.id}" if result.id is not None else ""
        return f"{base}/posts/{result.post_id}{anchor}"
    if result.type == "user" and result.id is not None:
        return f"{base}/users/{result.id}"
    if result.type == "category" and result.id is not None:
        return f"{base}/?categoryId={result.id}"
    if result.type == "tag" and result.id is not None:
        return f"{base}/?tagIds={result.id}"
    return None


def _to_search_result(result: BackendSearchResult) -> SearchResult:
    highlights = {
        "text": result.highlighted_text,
        "subText": result.highlighted_sub_text,
        "extra": result.highlighted_extra,
    payload = response.model_dump()
    payload["transport"] = {
        "scope": scope.value,
        "endpoint": client.endpoint_url(scope),
    }
    # Remove empty highlight entries to keep the payload clean
    highlights = {key: value for key, value in highlights.items() if value}
    return SearchResult(
        type=result.type,
        id=result.id,
        title=result.text,
        subtitle=result.sub_text,
        extra=result.extra,
        post_id=result.post_id,
        url=_build_url(result),
        highlights=highlights,
    )
    return payload


KeywordParam = Annotated[str, Field(description="Keyword to search for", min_length=1)]
LimitParam = Annotated[
    int,
    Field(ge=1, le=MAX_LIMIT, description=f"Maximum number of results to return (<= {MAX_LIMIT})"),
]


@server.tool(name="search", description="Search OpenIsle content")
async def search(keyword: KeywordParam, limit: LimitParam = DEFAULT_LIMIT, ctx: Optional[Context] = None) -> SearchResponse:
    """Run a search query against the OpenIsle backend."""

    trimmed = keyword.strip()
    if not trimmed:
        raise mcp_exceptions.ToolError("Keyword must not be empty")

    if ctx is not None:
        await ctx.debug(f"Searching OpenIsle for '{trimmed}' (limit={limit})")

    try:
        raw_results = await _BACKEND_CLIENT.search_global(trimmed)
    except BackendClientError as exc:
        if ctx is not None:
            await ctx.error(f"Search request failed: {exc}")
        raise mcp_exceptions.ToolError(f"Search failed: {exc}") from exc

    results = [_to_search_result(result) for result in raw_results]
    limited = results[:limit]

    if ctx is not None:
        await ctx.info(
            "Search completed",
            keyword=trimmed,
            total_results=len(results),
            returned=len(limited),
        )

    return SearchResponse(keyword=trimmed, total_results=len(results), limit=limit, results=limited)
def _resolve_client(ctx: Context | None) -> SearchClient:
    app = ctx.fastmcp if ctx is not None else mcp
    client = getattr(app, "_search_client", None)
    if client is None:
        raise RuntimeError("Search client is not initialised; lifespan hook not executed")
    return client


def main() -> None:
    parser = argparse.ArgumentParser(description="Run the OpenIsle MCP server")
    parser.add_argument(
        "--transport",
        choices=["stdio", "sse", "streamable-http"],
        default=_env("OPENISLE_MCP_TRANSPORT", DEFAULT_TRANSPORT),
        help="Transport protocol to use",
    )
    parser.add_argument(
        "--mount-path",
        default=_env("OPENISLE_MCP_SSE_MOUNT_PATH", "/mcp"),
        help="Mount path when using the SSE transport",
    )
    args = parser.parse_args()
    """CLI entrypoint."""

    logging.basicConfig(level=os.getenv("OPENISLE_MCP_LOG_LEVEL", "INFO"))
    logger.info(
        "Starting OpenIsle MCP server", extra={"transport": args.transport, "backend": _BACKEND_CLIENT.base_url}
    )
    transport = os.getenv("OPENISLE_MCP_TRANSPORT", "stdio").strip().lower()
    show_banner = os.getenv("OPENISLE_MCP_SHOW_BANNER", "true").lower() in {"1", "true", "yes"}
    run_kwargs: dict[str, Any] = {"show_banner": show_banner}

    server.run(transport=args.transport, mount_path=args.mount_path)
    if transport in {"http", "sse", "streamable-http"}:
        host = os.getenv("OPENISLE_MCP_HOST", "127.0.0.1")
        port = int(os.getenv("OPENISLE_MCP_PORT", "8974"))
        run_kwargs.update({"host": host, "port": port})

    mcp.run(transport=transport, **run_kwargs)


if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover - manual execution guard
    main()
mcp/src/openisle_mcp/settings.py (new file, 102 lines)
@@ -0,0 +1,102 @@
"""Environment configuration for the MCP server."""

from __future__ import annotations

import os
from typing import Any

from pydantic import BaseModel, Field, ValidationError, field_validator


class Settings(BaseModel):
    """Runtime configuration sourced from environment variables."""

    api_base_url: str = Field(
        default="http://springboot:8080",
        description="Base URL of the OpenIsle backend REST API.",
    )
    request_timeout: float = Field(
        default=10.0,
        description="Timeout in seconds for outgoing HTTP requests.",
        ge=0.1,
    )
    default_limit: int = Field(
        default=20,
        description="Default maximum number of results returned by the search tool.",
    )
    snippet_length: int = Field(
        default=160,
        description="Maximum length for the normalised snippet field.",
        ge=40,
    )

    model_config = {
        "extra": "ignore",
        "validate_assignment": True,
    }

    @field_validator("api_base_url", mode="before")
    @classmethod
    def _strip_trailing_slash(cls, value: Any) -> Any:
        if isinstance(value, str):
            value = value.strip()
            if value.endswith("/"):
                return value.rstrip("/")
        return value

    @field_validator("default_limit", mode="before")
    @classmethod
    def _parse_default_limit(cls, value: Any) -> Any:
        if isinstance(value, str) and value.strip():
            try:
                return int(value)
            except ValueError as exc: # pragma: no cover - defensive
                raise ValueError("default_limit must be an integer") from exc
        return value

    @field_validator("snippet_length", mode="before")
    @classmethod
    def _parse_snippet_length(cls, value: Any) -> Any:
        if isinstance(value, str) and value.strip():
            try:
                return int(value)
            except ValueError as exc: # pragma: no cover - defensive
                raise ValueError("snippet_length must be an integer") from exc
        return value

    @field_validator("request_timeout", mode="before")
    @classmethod
    def _parse_timeout(cls, value: Any) -> Any:
        if isinstance(value, str) and value.strip():
            try:
                return float(value)
            except ValueError as exc: # pragma: no cover - defensive
                raise ValueError("request_timeout must be a number") from exc
        return value

    @classmethod
    def from_env(cls) -> "Settings":
        """Build a settings object using environment variables."""

        data: dict[str, Any] = {}
        mapping = {
            "api_base_url": "OPENISLE_API_BASE_URL",
            "request_timeout": "OPENISLE_API_TIMEOUT",
            "default_limit": "OPENISLE_MCP_DEFAULT_LIMIT",
            "snippet_length": "OPENISLE_MCP_SNIPPET_LENGTH",
        }
        for field, env_key in mapping.items():
            value = os.getenv(env_key)
            if value is not None and value != "":
                data[field] = value
        try:
            return cls.model_validate(data)
        except ValidationError as exc: # pragma: no cover - validation errors surface early
            raise ValueError(
                "Invalid MCP settings derived from environment variables"
            ) from exc

    def sanitized_base_url(self) -> str:
        """Return the API base URL without trailing slashes."""

        return self.api_base_url.rstrip("/")
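Taken together, the new settings, client, and model types can also be exercised outside the MCP runtime. A minimal sketch (not part of the commit), assuming the package has been installed with `pip install -e .` and a backend is reachable at the URL below:

```python
import asyncio
import os

from openisle_mcp.client import SearchClient
from openisle_mcp.models import SearchScope
from openisle_mcp.settings import Settings


async def demo() -> None:
    # Assumption: an OpenIsle backend is running locally on port 8080.
    os.environ["OPENISLE_API_BASE_URL"] = "http://localhost:8080"
    settings = Settings.from_env()
    client = SearchClient(settings)
    try:
        # Positional keyword and scope, keyword-only limit, as defined by SearchClient.search().
        response = await client.search("welcome", SearchScope.GLOBAL, limit=5)
        for hit in response.normalized:
            print(hit.type, hit.id, hit.title, hit.snippet)
    finally:
        await client.aclose()


asyncio.run(demo())
```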