mirror of
https://github.com/nagisa77/OpenIsle.git
synced 2026-02-27 16:40:50 +08:00
Compare commits
1 Commits
codex/crea
...
codex/crea
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ab91ec2489 |
14
.env.example
14
.env.example
@@ -2,19 +2,15 @@
|
||||
SERVER_PORT=8080
|
||||
FRONTEND_PORT=3000
|
||||
WEBSOCKET_PORT=8082
|
||||
MCP_PORT=9090
|
||||
MYSQL_PORT=3306
|
||||
REDIS_PORT=6379
|
||||
RABBITMQ_PORT=5672
|
||||
RABBITMQ_MANAGEMENT_PORT=15672
|
||||
|
||||
# === MCP Server ===
|
||||
OPENISLE_MCP_TRANSPORT=http
|
||||
OPENISLE_MCP_HOST=0.0.0.0
|
||||
OPENISLE_MCP_PORT=8974
|
||||
OPENISLE_API_BASE_URL=http://springboot:8080
|
||||
OPENISLE_API_TIMEOUT=10
|
||||
OPENISLE_MCP_DEFAULT_LIMIT=20
|
||||
OPENISLE_MCP_SNIPPET_LENGTH=160
|
||||
MCP_HOST=0.0.0.0
|
||||
MCP_BACKEND_BASE_URL=http://springboot:8080
|
||||
MCP_CONNECT_TIMEOUT=5
|
||||
MCP_READ_TIMEOUT=10
|
||||
|
||||
# === OpenSearch Configuration ===
|
||||
OPENSEARCH_PORT=9200
|
||||
|
||||
@@ -28,7 +28,6 @@ OpenIsle 是一个使用 Spring Boot 和 Vue 3 构建的全栈开源社区平台
|
||||
- 支持图片上传,默认使用腾讯云 COS 扩展
|
||||
- 默认头像使用 DiceBear Avatars,可通过 `AVATAR_STYLE` 和 `AVATAR_SIZE` 环境变量自定义主题和大小
|
||||
- 浏览器推送通知,离开网站也能及时收到提醒
|
||||
- 新增 Python MCP 搜索服务,方便 AI 助手通过统一协议检索社区内容
|
||||
|
||||
## 🌟 项目优势
|
||||
|
||||
|
||||
@@ -40,12 +40,12 @@ echo "👉 Build images ..."
|
||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||
build --pull \
|
||||
--build-arg NUXT_ENV=production \
|
||||
frontend_service
|
||||
frontend_service mcp-service
|
||||
|
||||
echo "👉 Recreate & start all target services (no dev profile)..."
|
||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||
up -d --force-recreate --remove-orphans --no-deps \
|
||||
mysql redis rabbitmq websocket-service springboot frontend_service
|
||||
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
|
||||
|
||||
echo "👉 Current status:"
|
||||
docker compose -f "$compose_file" --env-file "$env_file" ps
|
||||
|
||||
@@ -39,12 +39,12 @@ echo "👉 Build images (staging)..."
|
||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||
build --pull \
|
||||
--build-arg NUXT_ENV=staging \
|
||||
frontend_service
|
||||
frontend_service mcp-service
|
||||
|
||||
echo "👉 Recreate & start all target services (no dev profile)..."
|
||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||
up -d --force-recreate --remove-orphans --no-deps \
|
||||
mysql redis rabbitmq websocket-service springboot frontend_service
|
||||
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
|
||||
|
||||
echo "👉 Current status:"
|
||||
docker compose -f "$compose_file" --env-file "$env_file" ps
|
||||
|
||||
@@ -178,32 +178,36 @@ services:
|
||||
- dev
|
||||
- prod
|
||||
|
||||
mcp-server:
|
||||
mcp-service:
|
||||
build:
|
||||
context: ../mcp
|
||||
dockerfile: Dockerfile
|
||||
context: ..
|
||||
dockerfile: mcp/Dockerfile
|
||||
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
|
||||
env_file:
|
||||
- ${ENV_FILE:-../.env}
|
||||
environment:
|
||||
OPENISLE_API_BASE_URL: ${OPENISLE_API_BASE_URL:-http://springboot:8080}
|
||||
OPENISLE_API_TIMEOUT: ${OPENISLE_API_TIMEOUT:-10}
|
||||
OPENISLE_MCP_DEFAULT_LIMIT: ${OPENISLE_MCP_DEFAULT_LIMIT:-20}
|
||||
OPENISLE_MCP_SNIPPET_LENGTH: ${OPENISLE_MCP_SNIPPET_LENGTH:-160}
|
||||
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-http}
|
||||
OPENISLE_MCP_HOST: 0.0.0.0
|
||||
OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8974}
|
||||
MCP_HOST: ${MCP_HOST:-0.0.0.0}
|
||||
MCP_PORT: ${MCP_PORT:-9090}
|
||||
MCP_BACKEND_BASE_URL: ${MCP_BACKEND_BASE_URL:-http://springboot:8080}
|
||||
MCP_CONNECT_TIMEOUT: ${MCP_CONNECT_TIMEOUT:-5}
|
||||
MCP_READ_TIMEOUT: ${MCP_READ_TIMEOUT:-10}
|
||||
ports:
|
||||
- "${OPENISLE_MCP_PORT:-8974}:${OPENISLE_MCP_PORT:-8974}"
|
||||
- "${MCP_PORT:-9090}:${MCP_PORT:-9090}"
|
||||
depends_on:
|
||||
springboot:
|
||||
condition: service_started
|
||||
condition: service_healthy
|
||||
command: ["openisle-mcp"]
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "curl -fsS http://127.0.0.1:${MCP_PORT:-9090}/healthz || exit 1"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 30
|
||||
start_period: 20s
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- openisle-network
|
||||
profiles:
|
||||
- dev
|
||||
- dev_local_backend
|
||||
- prod
|
||||
|
||||
websocket-service:
|
||||
|
||||
@@ -1,27 +1,21 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
FROM python:3.11-slim AS base
|
||||
FROM python:3.11-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY pyproject.toml README.md ./
|
||||
COPY src ./src
|
||||
COPY mcp/pyproject.toml ./pyproject.toml
|
||||
COPY mcp/README.md ./README.md
|
||||
COPY mcp/src ./src
|
||||
|
||||
RUN pip install --upgrade pip \
|
||||
&& pip install --no-cache-dir . \
|
||||
&& pip cache purge
|
||||
RUN pip install --no-cache-dir --upgrade pip \
|
||||
&& pip install --no-cache-dir .
|
||||
|
||||
ENV OPENISLE_MCP_TRANSPORT=http \
|
||||
OPENISLE_MCP_HOST=0.0.0.0 \
|
||||
OPENISLE_MCP_PORT=8974
|
||||
EXPOSE 9090
|
||||
|
||||
EXPOSE 8974
|
||||
|
||||
ENTRYPOINT ["openisle-mcp"]
|
||||
CMD ["openisle-mcp"]
|
||||
|
||||
@@ -1,51 +1,34 @@
|
||||
# OpenIsle MCP Server
|
||||
# OpenIsle MCP Service
|
||||
|
||||
This package provides a Python implementation of a Model Context Protocol (MCP) server for OpenIsle. The server focuses on the community search APIs so that AI assistants and other MCP-aware clients can discover OpenIsle users, posts, categories, comments, and tags. Additional capabilities such as content creation tools can be layered on later without changing the transport or deployment model.
|
||||
|
||||
## Features
|
||||
|
||||
- ✅ Implements the MCP tooling interface using [FastMCP](https://github.com/modelcontextprotocol/fastmcp).
|
||||
- 🔍 Exposes a `search` tool that proxies requests to the existing OpenIsle REST endpoints and normalises the response payload.
|
||||
- ⚙️ Configurable through environment variables for API base URL, timeout, result limits, and snippet size.
|
||||
- 🐳 Packaged with a Docker image so it can be launched alongside the other OpenIsle services.
|
||||
|
||||
## Environment variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
| --- | --- | --- |
|
||||
| `OPENISLE_API_BASE_URL` | `http://springboot:8080` | Base URL of the OpenIsle backend REST API. |
|
||||
| `OPENISLE_API_TIMEOUT` | `10` | Timeout (in seconds) used when calling the backend search endpoints. |
|
||||
| `OPENISLE_MCP_DEFAULT_LIMIT` | `20` | Default maximum number of search results to return when the caller does not provide a limit. Use `0` or a negative number to disable limiting. |
|
||||
| `OPENISLE_MCP_SNIPPET_LENGTH` | `160` | Maximum length (in characters) of the normalised summary snippet returned by the MCP tool. |
|
||||
| `OPENISLE_MCP_TRANSPORT` | `stdio` | Transport used when running the server directly. Set to `http` when running inside Docker. |
|
||||
| `OPENISLE_MCP_HOST` | `127.0.0.1` | Bind host used when the transport is HTTP/SSE. |
|
||||
| `OPENISLE_MCP_PORT` | `8974` | Bind port used when the transport is HTTP/SSE. |
|
||||
This package hosts a lightweight Python service that exposes OpenIsle search
|
||||
capabilities through a Model Context Protocol (MCP) compatible HTTP interface.
|
||||
It currently forwards search requests to the main Spring Boot backend and
|
||||
returns the aggregated results. The service is intentionally simple so we can
|
||||
iterate quickly and extend it with additional tools (for example, post
|
||||
creation) in future updates.
|
||||
|
||||
## Local development
|
||||
|
||||
```bash
|
||||
cd mcp
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install --upgrade pip
|
||||
pip install -e .
|
||||
OPENISLE_API_BASE_URL=http://localhost:8080 OPENISLE_MCP_TRANSPORT=http openisle-mcp
|
||||
pip install -e ./mcp
|
||||
openisle-mcp
|
||||
```
|
||||
|
||||
By default the server listens over stdio, which is useful when integrating with MCP-aware IDEs. When the `OPENISLE_MCP_TRANSPORT` variable is set to `http` the server will expose an HTTP transport on `OPENISLE_MCP_HOST:OPENISLE_MCP_PORT`.
|
||||
By default the server listens on port `9090` and expects the Spring Boot backend
|
||||
at `http://localhost:8080`. Configure the behaviour with the following
|
||||
environment variables:
|
||||
|
||||
## Docker image
|
||||
- `MCP_PORT` – HTTP port the MCP service should listen on (default: `9090`).
|
||||
- `MCP_HOST` – Bind host for the HTTP server (default: `0.0.0.0`).
|
||||
- `MCP_BACKEND_BASE_URL` – Base URL of the Spring Boot backend that provides the
|
||||
search endpoints (default: `http://springboot:8080`).
|
||||
- `MCP_CONNECT_TIMEOUT` – Connection timeout (seconds) when calling the backend
|
||||
(default: `5`).
|
||||
- `MCP_READ_TIMEOUT` – Read timeout (seconds) when calling the backend (default:
|
||||
`10`).
|
||||
|
||||
The accompanying `Dockerfile` builds a minimal image that installs the package and starts the MCP server. The root Docker Compose manifest is configured to launch this service and connect it to the same internal network as the Spring Boot API so the MCP tools can reach the search endpoints.
|
||||
## Docker
|
||||
|
||||
## MCP tool contract
|
||||
|
||||
The `search` tool accepts the following arguments:
|
||||
|
||||
- `keyword` (string, required): Search phrase passed directly to the OpenIsle API.
|
||||
- `scope` (string, optional): One of `global`, `posts`, `posts_content`, `posts_title`, or `users`. Defaults to `global`.
|
||||
- `limit` (integer, optional): Overrides the default limit from `OPENISLE_MCP_DEFAULT_LIMIT`.
|
||||
|
||||
The tool returns a JSON object containing both the raw API response and a normalised representation with concise titles, subtitles, and snippets for each result.
|
||||
|
||||
Future tools (for example posting or moderation helpers) can be added within this package and exposed via additional decorators without changing the deployment setup.
|
||||
The repository contains a Dockerfile that builds a slim Python image running the
|
||||
service with `uvicorn`. The compose configuration wires the container into the
|
||||
existing OpenIsle stack so that deployments automatically start the MCP service.
|
||||
|
||||
@@ -1,30 +1,25 @@
|
||||
[build-system]
|
||||
requires = ["hatchling>=1.25.0"]
|
||||
requires = ["hatchling>=1.21.0"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "openisle-mcp"
|
||||
version = "0.1.0"
|
||||
description = "Model Context Protocol server exposing OpenIsle search functionality."
|
||||
description = "Model Context Protocol server exposing OpenIsle search capabilities"
|
||||
readme = "README.md"
|
||||
license = {text = "MIT"}
|
||||
authors = [{name = "OpenIsle Contributors"}]
|
||||
authors = [
|
||||
{ name = "OpenIsle" }
|
||||
]
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"fastmcp>=2.12.5",
|
||||
"httpx>=0.28.1",
|
||||
"pydantic>=2.7",
|
||||
"fastapi>=0.111.0,<1.0.0",
|
||||
"uvicorn[standard]>=0.29.0,<0.31.0",
|
||||
"httpx>=0.27.0,<0.28.0",
|
||||
"pydantic>=2.7.0,<3.0.0"
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
openisle-mcp = "openisle_mcp.server:main"
|
||||
openisle-mcp = "openisle_mcp.__main__:main"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/openisle_mcp"]
|
||||
|
||||
[tool.hatch.build.targets.sdist]
|
||||
include = [
|
||||
"src/openisle_mcp",
|
||||
"README.md",
|
||||
"pyproject.toml",
|
||||
]
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""OpenIsle MCP server package."""
|
||||
"""OpenIsle MCP service package."""
|
||||
|
||||
from .server import main
|
||||
from .config import Settings, get_settings
|
||||
from .server import create_app
|
||||
|
||||
__all__ = ["main"]
|
||||
__all__ = ["Settings", "get_settings", "create_app"]
|
||||
|
||||
24
mcp/src/openisle_mcp/__main__.py
Normal file
24
mcp/src/openisle_mcp/__main__.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""Entrypoint for running the MCP service with ``python -m``."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import uvicorn
|
||||
|
||||
from .config import get_settings
|
||||
|
||||
|
||||
def main() -> None:
|
||||
settings = get_settings()
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
uvicorn.run(
|
||||
"openisle_mcp.server:create_app",
|
||||
host=settings.host,
|
||||
port=settings.port,
|
||||
factory=True,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
main()
|
||||
@@ -1,218 +1,44 @@
|
||||
"""HTTP client wrappers for interacting with the OpenIsle backend."""
|
||||
"""HTTP client helpers for talking to the Spring Boot backend."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import html
|
||||
import re
|
||||
from typing import Any, Iterable
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
from .models import NormalizedSearchResult, SearchResponse, SearchScope
|
||||
from .settings import Settings
|
||||
from .config import Settings
|
||||
|
||||
_TAG_RE = re.compile(r"<[^>]+>")
|
||||
_WHITESPACE_RE = re.compile(r"\s+")
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearchClient:
|
||||
"""High level client around the OpenIsle search API."""
|
||||
"""Wrapper around :class:`httpx.AsyncClient` for search operations."""
|
||||
|
||||
_ENDPOINTS: dict[SearchScope, str] = {
|
||||
SearchScope.GLOBAL: "/api/search/global",
|
||||
SearchScope.POSTS: "/api/search/posts",
|
||||
SearchScope.POSTS_CONTENT: "/api/search/posts/content",
|
||||
SearchScope.POSTS_TITLE: "/api/search/posts/title",
|
||||
SearchScope.USERS: "/api/search/users",
|
||||
}
|
||||
|
||||
def __init__(self, settings: Settings) -> None:
|
||||
self._base_url = settings.sanitized_base_url()
|
||||
self._timeout = settings.request_timeout
|
||||
self._default_limit = settings.default_limit
|
||||
self._snippet_length = settings.snippet_length
|
||||
def __init__(self, settings: Settings):
|
||||
timeout = httpx.Timeout(
|
||||
connect=settings.connect_timeout,
|
||||
read=settings.read_timeout,
|
||||
write=settings.read_timeout,
|
||||
pool=None,
|
||||
)
|
||||
self._client = httpx.AsyncClient(
|
||||
base_url=self._base_url,
|
||||
timeout=self._timeout,
|
||||
headers={"Accept": "application/json"},
|
||||
base_url=settings.normalized_backend_base_url,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
async def aclose(self) -> None:
|
||||
async def close(self) -> None:
|
||||
await self._client.aclose()
|
||||
|
||||
def endpoint_path(self, scope: SearchScope) -> str:
|
||||
return self._ENDPOINTS[scope]
|
||||
|
||||
def endpoint_url(self, scope: SearchScope) -> str:
|
||||
return f"{self._base_url}{self.endpoint_path(scope)}"
|
||||
|
||||
async def search(
|
||||
self,
|
||||
keyword: str,
|
||||
scope: SearchScope,
|
||||
*,
|
||||
limit: int | None = None,
|
||||
) -> SearchResponse:
|
||||
"""Execute a search request and normalise the results."""
|
||||
|
||||
keyword = keyword.strip()
|
||||
effective_limit = self._resolve_limit(limit)
|
||||
|
||||
if not keyword:
|
||||
return SearchResponse(
|
||||
keyword=keyword,
|
||||
scope=scope,
|
||||
endpoint=self.endpoint_url(scope),
|
||||
limit=effective_limit,
|
||||
total_results=0,
|
||||
returned_results=0,
|
||||
normalized=[],
|
||||
raw=[],
|
||||
)
|
||||
|
||||
response = await self._client.get(
|
||||
self.endpoint_path(scope),
|
||||
params={"keyword": keyword},
|
||||
)
|
||||
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
|
||||
LOGGER.debug("Performing global search for keyword '%s'", keyword)
|
||||
response = await self._client.get("/api/search/global", params={"keyword": keyword})
|
||||
response.raise_for_status()
|
||||
payload = response.json()
|
||||
if not isinstance(payload, list): # pragma: no cover - defensive programming
|
||||
raise ValueError("Search API did not return a JSON array")
|
||||
|
||||
total_results = len(payload)
|
||||
items = payload if effective_limit is None else payload[:effective_limit]
|
||||
normalized = [self._normalise_item(scope, item) for item in items]
|
||||
|
||||
return SearchResponse(
|
||||
keyword=keyword,
|
||||
scope=scope,
|
||||
endpoint=self.endpoint_url(scope),
|
||||
limit=effective_limit,
|
||||
total_results=total_results,
|
||||
returned_results=len(items),
|
||||
normalized=normalized,
|
||||
raw=items,
|
||||
)
|
||||
|
||||
def _resolve_limit(self, requested: int | None) -> int | None:
|
||||
value = requested if requested is not None else self._default_limit
|
||||
if value is None:
|
||||
return None
|
||||
if value <= 0:
|
||||
return None
|
||||
return value
|
||||
|
||||
def _normalise_item(
|
||||
self,
|
||||
scope: SearchScope,
|
||||
item: Any,
|
||||
) -> NormalizedSearchResult:
|
||||
"""Normalise raw API objects into a consistent structure."""
|
||||
|
||||
if not isinstance(item, dict): # pragma: no cover - defensive programming
|
||||
return NormalizedSearchResult(type=scope.value, metadata={"raw": item})
|
||||
|
||||
if scope == SearchScope.GLOBAL:
|
||||
return self._normalise_global(item)
|
||||
if scope in {SearchScope.POSTS, SearchScope.POSTS_CONTENT, SearchScope.POSTS_TITLE}:
|
||||
return self._normalise_post(item)
|
||||
if scope == SearchScope.USERS:
|
||||
return self._normalise_user(item)
|
||||
return NormalizedSearchResult(type=scope.value, metadata=item)
|
||||
|
||||
def _normalise_global(self, item: dict[str, Any]) -> NormalizedSearchResult:
|
||||
highlights = {
|
||||
"title": item.get("highlightedText"),
|
||||
"subtitle": item.get("highlightedSubText"),
|
||||
"snippet": item.get("highlightedExtra"),
|
||||
}
|
||||
snippet_source = highlights.get("snippet") or item.get("extra")
|
||||
metadata = {
|
||||
"postId": item.get("postId"),
|
||||
"highlights": {k: v for k, v in highlights.items() if v},
|
||||
}
|
||||
return NormalizedSearchResult(
|
||||
type=str(item.get("type", "result")),
|
||||
id=_safe_int(item.get("id")),
|
||||
title=highlights.get("title") or _safe_str(item.get("text")),
|
||||
subtitle=highlights.get("subtitle") or _safe_str(item.get("subText")),
|
||||
snippet=self._snippet(snippet_source),
|
||||
metadata={k: v for k, v in metadata.items() if v not in (None, {}, [])},
|
||||
)
|
||||
|
||||
def _normalise_post(self, item: dict[str, Any]) -> NormalizedSearchResult:
|
||||
author = _safe_dict(item.get("author"))
|
||||
category = _safe_dict(item.get("category"))
|
||||
tags = [tag.get("name") for tag in _safe_iter(item.get("tags")) if isinstance(tag, dict)]
|
||||
metadata = {
|
||||
"author": author.get("username"),
|
||||
"category": category.get("name"),
|
||||
"tags": tags,
|
||||
"views": item.get("views"),
|
||||
"commentCount": item.get("commentCount"),
|
||||
"status": item.get("status"),
|
||||
"apiUrl": f"{self._base_url}/api/posts/{item.get('id')}" if item.get("id") else None,
|
||||
}
|
||||
return NormalizedSearchResult(
|
||||
type="post",
|
||||
id=_safe_int(item.get("id")),
|
||||
title=_safe_str(item.get("title")),
|
||||
subtitle=_safe_str(category.get("name")),
|
||||
snippet=self._snippet(item.get("content")),
|
||||
metadata={k: v for k, v in metadata.items() if v not in (None, [], {})},
|
||||
)
|
||||
|
||||
def _normalise_user(self, item: dict[str, Any]) -> NormalizedSearchResult:
|
||||
metadata = {
|
||||
"followers": item.get("followers"),
|
||||
"following": item.get("following"),
|
||||
"totalViews": item.get("totalViews"),
|
||||
"role": item.get("role"),
|
||||
"subscribed": item.get("subscribed"),
|
||||
"apiUrl": f"{self._base_url}/api/users/{item.get('id')}" if item.get("id") else None,
|
||||
}
|
||||
return NormalizedSearchResult(
|
||||
type="user",
|
||||
id=_safe_int(item.get("id")),
|
||||
title=_safe_str(item.get("username")),
|
||||
subtitle=_safe_str(item.get("email") or item.get("role")),
|
||||
snippet=self._snippet(item.get("introduction")),
|
||||
metadata={k: v for k, v in metadata.items() if v not in (None, [], {})},
|
||||
)
|
||||
|
||||
def _snippet(self, value: Any) -> str | None:
|
||||
text = _safe_str(value)
|
||||
if not text:
|
||||
return None
|
||||
text = html.unescape(text)
|
||||
text = _TAG_RE.sub(" ", text)
|
||||
text = _WHITESPACE_RE.sub(" ", text).strip()
|
||||
if not text:
|
||||
return None
|
||||
if len(text) <= self._snippet_length:
|
||||
return text
|
||||
return text[: self._snippet_length - 1].rstrip() + "…"
|
||||
if isinstance(payload, list):
|
||||
return payload
|
||||
LOGGER.warning("Unexpected payload type from backend: %s", type(payload))
|
||||
return []
|
||||
|
||||
|
||||
def _safe_int(value: Any) -> int | None:
|
||||
try:
|
||||
return int(value)
|
||||
except (TypeError, ValueError): # pragma: no cover - defensive
|
||||
return None
|
||||
|
||||
|
||||
def _safe_str(value: Any) -> str | None:
|
||||
if value is None:
|
||||
return None
|
||||
text = str(value).strip()
|
||||
return text or None
|
||||
|
||||
|
||||
def _safe_dict(value: Any) -> dict[str, Any]:
|
||||
return value if isinstance(value, dict) else {}
|
||||
|
||||
|
||||
def _safe_iter(value: Any) -> Iterable[Any]:
|
||||
if isinstance(value, list | tuple | set):
|
||||
return value
|
||||
return []
|
||||
__all__ = ["SearchClient"]
|
||||
|
||||
71
mcp/src/openisle_mcp/config.py
Normal file
71
mcp/src/openisle_mcp/config.py
Normal file
@@ -0,0 +1,71 @@
|
||||
"""Configuration helpers for the MCP service."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from functools import lru_cache
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, ValidationError
|
||||
|
||||
|
||||
class Settings(BaseModel):
|
||||
"""Application settings sourced from environment variables."""
|
||||
|
||||
host: str = Field(default="0.0.0.0", description="Host to bind the HTTP server to")
|
||||
port: int = Field(default=9090, ge=1, le=65535, description="Port exposed by the MCP server")
|
||||
backend_base_url: str = Field(
|
||||
default="http://springboot:8080",
|
||||
description="Base URL of the Spring Boot backend that provides search endpoints",
|
||||
)
|
||||
connect_timeout: float = Field(
|
||||
default=5.0,
|
||||
ge=0.0,
|
||||
description="Connection timeout when communicating with the backend (seconds)",
|
||||
)
|
||||
read_timeout: float = Field(
|
||||
default=10.0,
|
||||
ge=0.0,
|
||||
description="Read timeout when communicating with the backend (seconds)",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(extra="ignore")
|
||||
|
||||
@property
|
||||
def normalized_backend_base_url(self) -> str:
|
||||
"""Return the backend base URL without a trailing slash."""
|
||||
|
||||
return self.backend_base_url.rstrip("/")
|
||||
|
||||
|
||||
ENV_MAPPING: dict[str, str] = {
|
||||
"host": "MCP_HOST",
|
||||
"port": "MCP_PORT",
|
||||
"backend_base_url": "MCP_BACKEND_BASE_URL",
|
||||
"connect_timeout": "MCP_CONNECT_TIMEOUT",
|
||||
"read_timeout": "MCP_READ_TIMEOUT",
|
||||
}
|
||||
|
||||
|
||||
def _load_environment_values() -> dict[str, Any]:
|
||||
values: dict[str, Any] = {}
|
||||
for field, env_name in ENV_MAPPING.items():
|
||||
value = os.getenv(env_name)
|
||||
if value is None:
|
||||
continue
|
||||
values[field] = value
|
||||
return values
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def get_settings() -> Settings:
|
||||
"""Load and validate application settings."""
|
||||
|
||||
values = _load_environment_values()
|
||||
try:
|
||||
return Settings(**values)
|
||||
except ValidationError as exc: # pragma: no cover - defensive branch
|
||||
raise RuntimeError("Invalid MCP configuration") from exc
|
||||
|
||||
|
||||
__all__ = ["Settings", "get_settings"]
|
||||
@@ -1,71 +1,38 @@
|
||||
"""Shared models for the OpenIsle MCP server."""
|
||||
"""Pydantic models shared across the MCP service."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
|
||||
class SearchScope(str, Enum):
|
||||
"""Supported search endpoints."""
|
||||
class SearchResult(BaseModel):
|
||||
"""Representation of a single search result entry."""
|
||||
|
||||
GLOBAL = "global"
|
||||
POSTS = "posts"
|
||||
POSTS_CONTENT = "posts_content"
|
||||
POSTS_TITLE = "posts_title"
|
||||
USERS = "users"
|
||||
model_config = ConfigDict(extra="ignore")
|
||||
|
||||
def __str__(self) -> str: # pragma: no cover - convenience for logging
|
||||
return self.value
|
||||
|
||||
|
||||
class NormalizedSearchResult(BaseModel):
|
||||
"""Compact structure returned by the MCP search tool."""
|
||||
|
||||
type: str = Field(description="Entity type, e.g. user, post, comment.")
|
||||
id: int | None = Field(default=None, description="Primary identifier of the entity.")
|
||||
title: str | None = Field(default=None, description="Display title for the result.")
|
||||
subtitle: str | None = Field(default=None, description="Secondary line of context.")
|
||||
snippet: str | None = Field(default=None, description="Short summary of the result.")
|
||||
metadata: dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
description="Additional attributes extracted from the API response.",
|
||||
type: Optional[str] = Field(default=None, description="Type of the result entry")
|
||||
id: Optional[int] = Field(default=None, description="Identifier of the result entry")
|
||||
text: Optional[str] = Field(default=None, description="Primary text of the result entry")
|
||||
subText: Optional[str] = Field(default=None, description="Secondary text associated with the result")
|
||||
extra: Optional[str] = Field(default=None, description="Additional information about the result")
|
||||
postId: Optional[int] = Field(default=None, description="Related post identifier, if applicable")
|
||||
highlightedText: Optional[str] = Field(default=None, description="Highlighted primary text segment")
|
||||
highlightedSubText: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Highlighted secondary text segment",
|
||||
)
|
||||
highlightedExtra: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Highlighted additional information",
|
||||
)
|
||||
|
||||
model_config = {
|
||||
"extra": "ignore",
|
||||
}
|
||||
|
||||
|
||||
class SearchResponse(BaseModel):
|
||||
"""Payload returned to MCP clients."""
|
||||
"""Response payload returned by the search endpoint."""
|
||||
|
||||
keyword: str
|
||||
scope: SearchScope
|
||||
endpoint: str
|
||||
limit: int | None = Field(
|
||||
default=None,
|
||||
description="Result limit applied to the request. None means unlimited.",
|
||||
)
|
||||
total_results: int = Field(
|
||||
default=0,
|
||||
description="Total number of items returned by the OpenIsle API before limiting.",
|
||||
)
|
||||
returned_results: int = Field(
|
||||
default=0,
|
||||
description="Number of items returned to the MCP client after limiting.",
|
||||
)
|
||||
normalized: list[NormalizedSearchResult] = Field(
|
||||
default_factory=list,
|
||||
description="Normalised representation of each search hit.",
|
||||
)
|
||||
raw: list[Any] = Field(
|
||||
default_factory=list,
|
||||
description="Raw response objects from the OpenIsle REST API.",
|
||||
)
|
||||
results: list[SearchResult] = Field(default_factory=list)
|
||||
|
||||
model_config = {
|
||||
"extra": "ignore",
|
||||
}
|
||||
|
||||
__all__ = ["SearchResult", "SearchResponse"]
|
||||
|
||||
@@ -1,95 +1,66 @@
|
||||
"""Entrypoint for the OpenIsle MCP server."""
|
||||
"""FastAPI application exposing the MCP server endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import Any
|
||||
import logging
|
||||
|
||||
from fastapi import Depends, FastAPI, HTTPException, Query, Request
|
||||
import httpx
|
||||
from fastmcp import Context, FastMCP
|
||||
|
||||
from .client import SearchClient
|
||||
from .models import SearchResponse, SearchScope
|
||||
from .settings import Settings
|
||||
from .config import get_settings
|
||||
from .models import SearchResponse, SearchResult
|
||||
|
||||
__all__ = ["main"]
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _create_lifespan(settings: Settings):
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastMCP):
|
||||
client = SearchClient(settings)
|
||||
setattr(app, "_search_client", client)
|
||||
try:
|
||||
yield {"client": client}
|
||||
finally:
|
||||
await client.aclose()
|
||||
if hasattr(app, "_search_client"):
|
||||
delattr(app, "_search_client")
|
||||
|
||||
return lifespan
|
||||
|
||||
|
||||
_settings = Settings.from_env()
|
||||
|
||||
mcp = FastMCP(
|
||||
name="OpenIsle Search",
|
||||
version="0.1.0",
|
||||
instructions=(
|
||||
"Provides access to OpenIsle search endpoints for retrieving users, posts, "
|
||||
"comments, tags, and categories."
|
||||
),
|
||||
lifespan=_create_lifespan(_settings),
|
||||
)
|
||||
|
||||
|
||||
@mcp.tool("search")
|
||||
async def search(
|
||||
keyword: str,
|
||||
scope: SearchScope = SearchScope.GLOBAL,
|
||||
limit: int | None = None,
|
||||
ctx: Context | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Perform a search against the OpenIsle backend."""
|
||||
|
||||
client = _resolve_client(ctx)
|
||||
async def _lifespan(app: FastAPI):
|
||||
settings = get_settings()
|
||||
client = SearchClient(settings)
|
||||
app.state.settings = settings
|
||||
app.state.search_client = client
|
||||
LOGGER.info(
|
||||
"Starting MCP server on %s:%s targeting backend %s",
|
||||
settings.host,
|
||||
settings.port,
|
||||
settings.normalized_backend_base_url,
|
||||
)
|
||||
try:
|
||||
response: SearchResponse = await client.search(keyword=keyword, scope=scope, limit=limit)
|
||||
except httpx.HTTPError as exc:
|
||||
message = f"OpenIsle search request failed: {exc}".rstrip()
|
||||
raise RuntimeError(message) from exc
|
||||
|
||||
payload = response.model_dump()
|
||||
payload["transport"] = {
|
||||
"scope": scope.value,
|
||||
"endpoint": client.endpoint_url(scope),
|
||||
}
|
||||
return payload
|
||||
yield
|
||||
finally:
|
||||
LOGGER.info("Shutting down MCP server")
|
||||
await client.close()
|
||||
|
||||
|
||||
def _resolve_client(ctx: Context | None) -> SearchClient:
|
||||
app = ctx.fastmcp if ctx is not None else mcp
|
||||
client = getattr(app, "_search_client", None)
|
||||
if client is None:
|
||||
raise RuntimeError("Search client is not initialised; lifespan hook not executed")
|
||||
return client
|
||||
def create_app() -> FastAPI:
    """Create and configure the FastAPI application."""

    app = FastAPI(title="OpenIsle MCP Server", lifespan=_lifespan)

    async def get_client(request: Request) -> SearchClient:
        # Dependency: return the client the lifespan hook stored on app.state.
        return request.app.state.search_client

    @app.get("/healthz", tags=["health"])
    async def healthcheck() -> dict[str, str]:
        # Liveness probe; involves no backend traffic.
        return {"status": "ok"}

    @app.get("/search", response_model=SearchResponse, tags=["search"])
    async def search(
        keyword: str = Query(..., min_length=1, description="Keyword to search for"),
        client: SearchClient = Depends(get_client),
    ) -> SearchResponse:
        # Proxy the keyword to the backend, translating transport failures
        # into HTTP errors the caller can act on.
        try:
            raw_results = await client.global_search(keyword)
        except httpx.HTTPStatusError as exc:
            LOGGER.warning("Backend responded with error %s", exc.response.status_code)
            raise HTTPException(status_code=exc.response.status_code, detail="Backend error") from exc
        except httpx.HTTPError as exc:
            LOGGER.error("Failed to reach backend: %s", exc)
            raise HTTPException(status_code=503, detail="Search service unavailable") from exc
        return SearchResponse(results=[SearchResult.model_validate(item) for item in raw_results])

    return app
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entrypoint.

    Reads transport configuration from ``OPENISLE_MCP_*`` environment
    variables and starts the FastMCP server.
    """

    transport = os.getenv("OPENISLE_MCP_TRANSPORT", "stdio").strip().lower()
    # Fix: strip whitespace before comparing, mirroring the transport handling
    # above, so values like " true " still enable the banner.
    show_banner = os.getenv("OPENISLE_MCP_SHOW_BANNER", "true").strip().lower() in {"1", "true", "yes"}
    run_kwargs: dict[str, Any] = {"show_banner": show_banner}

    # Network transports additionally need a bind address; stdio does not.
    if transport in {"http", "sse", "streamable-http"}:
        host = os.getenv("OPENISLE_MCP_HOST", "127.0.0.1")
        port = int(os.getenv("OPENISLE_MCP_PORT", "8974"))
        run_kwargs.update({"host": host, "port": port})

    mcp.run(transport=transport, **run_kwargs)
|
||||
|
||||
|
||||
# Allow running this module directly as a script.
if __name__ == "__main__":  # pragma: no cover - manual execution guard
    main()

# Public re-export surface of this module.
__all__ = ["create_app"]
|
||||
|
||||
@@ -1,102 +0,0 @@
|
||||
"""Environment configuration for the MCP server."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel, Field, ValidationError, field_validator
|
||||
|
||||
|
||||
class Settings(BaseModel):
    """Runtime configuration sourced from environment variables."""

    # Backend base URL; a validator below strips whitespace and trailing "/".
    api_base_url: str = Field(
        default="http://springboot:8080",
        description="Base URL of the OpenIsle backend REST API.",
    )
    # Timeout (seconds) applied to every outgoing HTTP request.
    request_timeout: float = Field(
        default=10.0,
        description="Timeout in seconds for outgoing HTTP requests.",
        ge=0.1,
    )
    # Result-count cap used when a caller does not pass an explicit limit.
    default_limit: int = Field(
        default=20,
        description="Default maximum number of results returned by the search tool.",
    )
    # Upper bound for the normalised snippet text.
    snippet_length: int = Field(
        default=160,
        description="Maximum length for the normalised snippet field.",
        ge=40,
    )

    model_config = {
        "extra": "ignore",
        "validate_assignment": True,
    }

    @field_validator("api_base_url", mode="before")
    @classmethod
    def _strip_trailing_slash(cls, value: Any) -> Any:
        """Trim whitespace and any trailing '/' from string URLs."""
        if not isinstance(value, str):
            return value
        cleaned = value.strip()
        return cleaned.rstrip("/") if cleaned.endswith("/") else cleaned

    @field_validator("default_limit", mode="before")
    @classmethod
    def _parse_default_limit(cls, value: Any) -> Any:
        """Coerce non-empty strings to int so env-sourced values validate."""
        if not (isinstance(value, str) and value.strip()):
            return value
        try:
            return int(value)
        except ValueError as exc:  # pragma: no cover - defensive
            raise ValueError("default_limit must be an integer") from exc

    @field_validator("snippet_length", mode="before")
    @classmethod
    def _parse_snippet_length(cls, value: Any) -> Any:
        """Coerce non-empty strings to int so env-sourced values validate."""
        if not (isinstance(value, str) and value.strip()):
            return value
        try:
            return int(value)
        except ValueError as exc:  # pragma: no cover - defensive
            raise ValueError("snippet_length must be an integer") from exc

    @field_validator("request_timeout", mode="before")
    @classmethod
    def _parse_timeout(cls, value: Any) -> Any:
        """Coerce non-empty strings to float so env-sourced values validate."""
        if not (isinstance(value, str) and value.strip()):
            return value
        try:
            return float(value)
        except ValueError as exc:  # pragma: no cover - defensive
            raise ValueError("request_timeout must be a number") from exc

    @classmethod
    def from_env(cls) -> "Settings":
        """Build a settings object using environment variables."""

        env_keys = {
            "api_base_url": "OPENISLE_API_BASE_URL",
            "request_timeout": "OPENISLE_API_TIMEOUT",
            "default_limit": "OPENISLE_MCP_DEFAULT_LIMIT",
            "snippet_length": "OPENISLE_MCP_SNIPPET_LENGTH",
        }
        # Only forward variables that are set and non-empty, so that the
        # model defaults apply for anything missing.
        data: dict[str, Any] = {
            name: raw
            for name, env_key in env_keys.items()
            if (raw := os.getenv(env_key)) is not None and raw != ""
        }
        try:
            return cls.model_validate(data)
        except ValidationError as exc:  # pragma: no cover - validation errors surface early
            raise ValueError(
                "Invalid MCP settings derived from environment variables"
            ) from exc

    def sanitized_base_url(self) -> str:
        """Return the API base URL without trailing slashes."""

        return self.api_base_url.rstrip("/")
|
||||
Reference in New Issue
Block a user