Compare commits

..

1 Commits

Author SHA1 Message Date
Tim
dfa7530373 feat: add MCP search server 2025-10-25 21:34:44 +08:00
17 changed files with 431 additions and 278 deletions

View File

@@ -2,15 +2,10 @@
SERVER_PORT=8080
FRONTEND_PORT=3000
WEBSOCKET_PORT=8082
MCP_PORT=9090
MYSQL_PORT=3306
REDIS_PORT=6379
RABBITMQ_PORT=5672
RABBITMQ_MANAGEMENT_PORT=15672
MCP_HOST=0.0.0.0
MCP_BACKEND_BASE_URL=http://springboot:8080
MCP_CONNECT_TIMEOUT=5
MCP_READ_TIMEOUT=10
# === OpenSearch Configuration ===
OPENSEARCH_PORT=9200

View File

@@ -40,12 +40,12 @@ echo "👉 Build images ..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=production \
frontend_service mcp-service
frontend_service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
mysql redis rabbitmq websocket-service springboot frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -39,12 +39,12 @@ echo "👉 Build images (staging)..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=staging \
frontend_service mcp-service
frontend_service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
mysql redis rabbitmq websocket-service springboot frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -178,38 +178,6 @@ services:
- dev
- prod
mcp-service:
build:
context: ..
dockerfile: mcp/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
env_file:
- ${ENV_FILE:-../.env}
environment:
MCP_HOST: ${MCP_HOST:-0.0.0.0}
MCP_PORT: ${MCP_PORT:-9090}
MCP_BACKEND_BASE_URL: ${MCP_BACKEND_BASE_URL:-http://springboot:8080}
MCP_CONNECT_TIMEOUT: ${MCP_CONNECT_TIMEOUT:-5}
MCP_READ_TIMEOUT: ${MCP_READ_TIMEOUT:-10}
ports:
- "${MCP_PORT:-9090}:${MCP_PORT:-9090}"
depends_on:
springboot:
condition: service_healthy
command: ["openisle-mcp"]
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://127.0.0.1:${MCP_PORT:-9090}/healthz || exit 1"]
interval: 10s
timeout: 5s
retries: 30
start_period: 20s
restart: unless-stopped
networks:
- openisle-network
profiles:
- dev
- prod
websocket-service:
image: maven:3.9-eclipse-temurin-17
container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket
@@ -245,6 +213,30 @@ services:
- dev_local_backend
- prod
mcp-server:
build:
context: ..
dockerfile: docker/mcp-service.Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
env_file:
- ${ENV_FILE:-../.env}
environment:
OPENISLE_API_BASE_URL: ${OPENISLE_API_BASE_URL:-http://springboot:8080}
OPENISLE_MCP_HOST: ${OPENISLE_MCP_HOST:-0.0.0.0}
OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8000}
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-streamable-http}
ports:
- "${OPENISLE_MCP_PORT:-8000}:8000"
depends_on:
springboot:
condition: service_healthy
networks:
- openisle-network
profiles:
- dev
- dev_local_backend
- prod
frontend_dev:
image: node:20
container_name: ${COMPOSE_PROJECT_NAME}-openisle-frontend-dev

View File

@@ -0,0 +1,20 @@
# Image for the OpenIsle MCP server (installed as the `openisle-mcp` package).
FROM python:3.11-slim AS base
# Unbuffered stdout/stderr so container logs stream immediately;
# disabling the pip cache keeps the image slim.
ENV PYTHONUNBUFFERED=1 \
PIP_NO_CACHE_DIR=1
WORKDIR /app
# Copy only the packaging metadata and sources needed for `pip install .`.
COPY mcp/pyproject.toml mcp/README.md ./
COPY mcp/src ./src
RUN pip install --upgrade pip \
&& pip install .
# Default MCP HTTP port; the compose service maps this container port.
EXPOSE 8000
# Runtime defaults, overridable at deploy time.
# NOTE(review): the compose mapping pins the container side to 8000, so
# overriding OPENISLE_MCP_PORT alone changes only the in-container bind
# port — confirm this is intended.
ENV OPENISLE_API_BASE_URL=http://springboot:8080 \
OPENISLE_MCP_HOST=0.0.0.0 \
OPENISLE_MCP_PORT=8000 \
OPENISLE_MCP_TRANSPORT=streamable-http
# Console script installed by the openisle-mcp package.
CMD ["openisle-mcp"]

6
mcp/.gitignore vendored Normal file
View File

@@ -0,0 +1,6 @@
__pycache__/
*.py[cod]
*.egg-info/
.build/
.venv/
.env

View File

@@ -1,21 +0,0 @@
FROM python:3.11-slim
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
WORKDIR /app
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl \
&& rm -rf /var/lib/apt/lists/*
COPY mcp/pyproject.toml ./pyproject.toml
COPY mcp/README.md ./README.md
COPY mcp/src ./src
RUN pip install --no-cache-dir --upgrade pip \
&& pip install --no-cache-dir .
EXPOSE 9090
CMD ["openisle-mcp"]

View File

@@ -1,34 +1,45 @@
# OpenIsle MCP Service
# OpenIsle MCP Server
This package hosts a lightweight Python service that exposes OpenIsle search
capabilities through a Model Context Protocol (MCP) compatible HTTP interface.
It currently forwards search requests to the main Spring Boot backend and
returns the aggregated results. The service is intentionally simple so we can
iterate quickly and extend it with additional tools (for example, post
creation) in future updates.
This package exposes a [Model Context Protocol](https://github.com/modelcontextprotocol) (MCP) server for OpenIsle.
The initial release focuses on surfacing the platform's search capabilities so that AI assistants can discover
users and posts directly through the existing REST API. Future iterations can expand this service with post
creation and other productivity tools.
## Local development
## Features
- 🔍 Keyword search across users and posts using the OpenIsle backend APIs
- ✅ Structured MCP tool response for downstream reasoning
- 🩺 Lightweight health check endpoint (`/health`) for container orchestration
- ⚙️ Configurable via environment variables with sensible defaults for Docker Compose
## Running locally
```bash
pip install -e ./mcp
openisle-mcp
cd mcp
pip install .
openisle-mcp # starts the MCP server on http://127.0.0.1:8000 by default
```
By default the server listens on port `9090` and expects the Spring Boot backend
at `http://localhost:8080`. Configure the behaviour with the following
environment variables:
By default the server targets `http://localhost:8080` for backend requests. Override the target by setting
`OPENISLE_API_BASE_URL` before starting the service.
- `MCP_PORT` HTTP port the MCP service should listen on (default: `9090`).
- `MCP_HOST` Bind host for the HTTP server (default: `0.0.0.0`).
- `MCP_BACKEND_BASE_URL` Base URL of the Spring Boot backend that provides the
search endpoints (default: `http://springboot:8080`).
- `MCP_CONNECT_TIMEOUT` Connection timeout (seconds) when calling the backend
(default: `5`).
- `MCP_READ_TIMEOUT` Read timeout (seconds) when calling the backend (default:
`10`).
## Environment variables
| Variable | Default | Description |
| -------- | ------- | ----------- |
| `OPENISLE_API_BASE_URL` | `http://localhost:8080` | Base URL of the OpenIsle backend API |
| `OPENISLE_MCP_HOST` | `127.0.0.1` | Hostname/interface for the MCP HTTP server |
| `OPENISLE_MCP_PORT` | `8000` | Port for the MCP HTTP server |
| `OPENISLE_MCP_TRANSPORT` | `streamable-http` | Transport mode (`stdio`, `sse`, or `streamable-http`) |
| `OPENISLE_MCP_TIMEOUT_SECONDS` | `10` | HTTP timeout when calling the backend |
## Docker
The repository contains a Dockerfile that builds a slim Python image running the
service with `uvicorn`. The compose configuration wires the container into the
existing OpenIsle stack so that deployments automatically start the MCP service.
The repository's Docker Compose stack now includes the MCP server. To start it alongside other services:
```bash
cd docker
docker compose --profile dev up mcp-server
```
The service exposes port `8000` by default. Update `OPENISLE_MCP_PORT` to customize the mapped port.

View File

@@ -1,25 +1,28 @@
[build-system]
requires = ["hatchling>=1.21.0"]
build-backend = "hatchling.build"
requires = ["setuptools>=68", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "openisle-mcp"
version = "0.1.0"
description = "Model Context Protocol server exposing OpenIsle search capabilities"
description = "Model Context Protocol server exposing OpenIsle search capabilities."
readme = "README.md"
authors = [
{ name = "OpenIsle" }
]
requires-python = ">=3.11"
authors = [{ name = "OpenIsle" }]
dependencies = [
"fastapi>=0.111.0,<1.0.0",
"uvicorn[standard]>=0.29.0,<0.31.0",
"httpx>=0.27.0,<0.28.0",
"pydantic>=2.7.0,<3.0.0"
"mcp>=1.19.0",
"httpx>=0.28.1",
"pydantic>=2.7.0"
]
[project.urls]
Homepage = "https://github.com/openisle/openisle"
[project.scripts]
openisle-mcp = "openisle_mcp.__main__:main"
openisle-mcp = "openisle_mcp.server:main"
[tool.hatch.build.targets.wheel]
packages = ["src/openisle_mcp"]
[tool.setuptools.packages.find]
where = ["src"]
[tool.setuptools.package-data]
openisle_mcp = ["py.typed"]

View File

@@ -1,6 +1,14 @@
"""OpenIsle MCP service package."""
"""OpenIsle MCP server package."""
from .config import Settings, get_settings
from .server import create_app
from .models import SearchItem, SearchResponse, SearchScope
__all__ = ["Settings", "get_settings", "create_app"]
__all__ = [
"Settings",
"get_settings",
"SearchItem",
"SearchResponse",
"SearchScope",
]
__version__ = "0.1.0"

View File

@@ -1,24 +0,0 @@
"""Entrypoint for running the MCP service with ``python -m``."""
from __future__ import annotations
import logging
import uvicorn
from .config import get_settings
def main() -> None:
settings = get_settings()
logging.basicConfig(level=logging.INFO)
uvicorn.run(
"openisle_mcp.server:create_app",
host=settings.host,
port=settings.port,
factory=True,
)
if __name__ == "__main__": # pragma: no cover
main()

View File

@@ -1,44 +1,33 @@
"""HTTP client helpers for talking to the Spring Boot backend."""
"""HTTP client helpers for interacting with the OpenIsle backend APIs."""
from __future__ import annotations
import logging
from typing import Any
import httpx
from .config import Settings
LOGGER = logging.getLogger(__name__)
from .config import Settings, get_settings
from .models import SearchScope
class SearchClient:
"""Wrapper around :class:`httpx.AsyncClient` for search operations."""
class OpenIsleAPI:
"""Thin wrapper around the OpenIsle REST API used by the MCP server."""
def __init__(self, settings: Settings):
timeout = httpx.Timeout(
connect=settings.connect_timeout,
read=settings.read_timeout,
write=settings.read_timeout,
pool=None,
)
self._client = httpx.AsyncClient(
base_url=settings.normalized_backend_base_url,
timeout=timeout,
)
def __init__(self, settings: Settings | None = None) -> None:
self._settings = settings or get_settings()
async def close(self) -> None:
await self._client.aclose()
async def search(self, scope: SearchScope, keyword: str) -> list[Any]:
"""Execute a search request against the backend API."""
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
LOGGER.debug("Performing global search for keyword '%s'", keyword)
response = await self._client.get("/api/search/global", params={"keyword": keyword})
response.raise_for_status()
payload = response.json()
if isinstance(payload, list):
return payload
LOGGER.warning("Unexpected payload type from backend: %s", type(payload))
return []
url_path = self._settings.get_search_path(scope)
async with httpx.AsyncClient(
base_url=str(self._settings.backend_base_url),
timeout=self._settings.request_timeout_seconds,
) as client:
response = await client.get(url_path, params={"keyword": keyword})
response.raise_for_status()
data = response.json()
__all__ = ["SearchClient"]
if not isinstance(data, list):
raise RuntimeError("Unexpected search response payload: expected a list")
return data

View File

@@ -1,71 +1,83 @@
"""Configuration helpers for the MCP service."""
"""Configuration helpers for the OpenIsle MCP server."""
from __future__ import annotations
import os
from functools import lru_cache
from typing import Any
from typing import Dict, Literal
from pydantic import BaseModel, ConfigDict, Field, ValidationError
from pydantic import AnyHttpUrl, BaseModel, Field, ValidationError
from .models import SearchScope
TransportType = Literal["stdio", "sse", "streamable-http"]
class Settings(BaseModel):
"""Application settings sourced from environment variables."""
"""Runtime configuration for the MCP server."""
host: str = Field(default="0.0.0.0", description="Host to bind the HTTP server to")
port: int = Field(default=9090, ge=1, le=65535, description="Port exposed by the MCP server")
backend_base_url: str = Field(
default="http://springboot:8080",
description="Base URL of the Spring Boot backend that provides search endpoints",
backend_base_url: AnyHttpUrl = Field(
default="http://localhost:8080",
description="Base URL of the OpenIsle backend API.",
)
connect_timeout: float = Field(
default=5.0,
ge=0.0,
description="Connection timeout when communicating with the backend (seconds)",
)
read_timeout: float = Field(
request_timeout_seconds: float = Field(
default=10.0,
ge=0.0,
description="Read timeout when communicating with the backend (seconds)",
gt=0,
description="HTTP timeout when talking to the backend APIs.",
)
transport: TransportType = Field(
default="streamable-http",
description="Transport mode for the MCP server.",
)
host: str = Field(default="127.0.0.1", description="Hostname/interface used by the MCP HTTP server.")
port: int = Field(default=8000, ge=0, description="Port used by the MCP HTTP server.")
search_paths: Dict[str, str] = Field(
default_factory=lambda: {
SearchScope.GLOBAL.value: "/api/search/global",
SearchScope.USERS.value: "/api/search/users",
SearchScope.POSTS.value: "/api/search/posts",
SearchScope.POSTS_TITLE.value: "/api/search/posts/title",
SearchScope.POSTS_CONTENT.value: "/api/search/posts/content",
},
description="Mapping between search scopes and backend API paths.",
)
model_config = ConfigDict(extra="ignore")
def get_search_path(self, scope: SearchScope) -> str:
"""Return the backend path associated with a given search scope."""
@property
def normalized_backend_base_url(self) -> str:
"""Return the backend base URL without a trailing slash."""
return self.backend_base_url.rstrip("/")
ENV_MAPPING: dict[str, str] = {
"host": "MCP_HOST",
"port": "MCP_PORT",
"backend_base_url": "MCP_BACKEND_BASE_URL",
"connect_timeout": "MCP_CONNECT_TIMEOUT",
"read_timeout": "MCP_READ_TIMEOUT",
}
def _load_environment_values() -> dict[str, Any]:
values: dict[str, Any] = {}
for field, env_name in ENV_MAPPING.items():
value = os.getenv(env_name)
if value is None:
continue
values[field] = value
return values
try:
return self.search_paths[scope.value]
except KeyError as exc: # pragma: no cover - defensive guard
raise ValueError(f"Unsupported search scope: {scope}") from exc
@lru_cache(maxsize=1)
def get_settings() -> Settings:
"""Load and validate application settings."""
"""Load settings from environment variables with caching."""
raw_settings: Dict[str, object] = {}
backend_url = os.getenv("OPENISLE_API_BASE_URL")
if backend_url:
raw_settings["backend_base_url"] = backend_url
timeout = os.getenv("OPENISLE_MCP_TIMEOUT_SECONDS")
if timeout:
raw_settings["request_timeout_seconds"] = float(timeout)
transport = os.getenv("OPENISLE_MCP_TRANSPORT")
if transport:
raw_settings["transport"] = transport
host = os.getenv("OPENISLE_MCP_HOST")
if host:
raw_settings["host"] = host
port = os.getenv("OPENISLE_MCP_PORT")
if port:
raw_settings["port"] = int(port)
values = _load_environment_values()
try:
return Settings(**values)
except ValidationError as exc: # pragma: no cover - defensive branch
raise RuntimeError("Invalid MCP configuration") from exc
__all__ = ["Settings", "get_settings"]
return Settings(**raw_settings)
except (ValidationError, ValueError) as exc: # pragma: no cover - configuration errors should surface clearly
raise RuntimeError(f"Invalid MCP configuration: {exc}") from exc

View File

@@ -1,38 +1,45 @@
"""Pydantic models shared across the MCP service."""
"""Data models for the OpenIsle MCP server."""
from __future__ import annotations
from typing import Optional
from enum import Enum
from typing import Any, Dict, Optional
from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, Field
class SearchResult(BaseModel):
"""Representation of a single search result entry."""
class SearchScope(str, Enum):
"""Supported search scopes exposed via the MCP tool."""
model_config = ConfigDict(extra="ignore")
GLOBAL = "global"
USERS = "users"
POSTS = "posts"
POSTS_TITLE = "posts_title"
POSTS_CONTENT = "posts_content"
type: Optional[str] = Field(default=None, description="Type of the result entry")
id: Optional[int] = Field(default=None, description="Identifier of the result entry")
text: Optional[str] = Field(default=None, description="Primary text of the result entry")
subText: Optional[str] = Field(default=None, description="Secondary text associated with the result")
extra: Optional[str] = Field(default=None, description="Additional information about the result")
postId: Optional[int] = Field(default=None, description="Related post identifier, if applicable")
highlightedText: Optional[str] = Field(default=None, description="Highlighted primary text segment")
highlightedSubText: Optional[str] = Field(
default=None,
description="Highlighted secondary text segment",
)
highlightedExtra: Optional[str] = Field(
default=None,
description="Highlighted additional information",
)
class Highlight(BaseModel):
"""Highlighted fragments returned by the backend search API."""
text: Optional[str] = Field(default=None, description="Highlighted main text snippet.")
sub_text: Optional[str] = Field(default=None, description="Highlighted secondary text snippet.")
extra: Optional[str] = Field(default=None, description="Additional highlighted data.")
class SearchItem(BaseModel):
"""Normalized representation of a single search result."""
category: str = Field(description="Type/category of the search result, e.g. user or post.")
title: Optional[str] = Field(default=None, description="Primary title or label for the result.")
description: Optional[str] = Field(default=None, description="Supporting description or summary text.")
url: Optional[str] = Field(default=None, description="Canonical URL that references the resource, if available.")
metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional structured metadata extracted from the API.")
highlights: Optional[Highlight] = Field(default=None, description="Highlighted snippets returned by the backend search API.")
class SearchResponse(BaseModel):
"""Response payload returned by the search endpoint."""
"""Structured response returned by the MCP search tool."""
results: list[SearchResult] = Field(default_factory=list)
__all__ = ["SearchResult", "SearchResponse"]
scope: SearchScope = Field(description="Scope of the search that produced the results.")
keyword: str = Field(description="Keyword submitted to the backend search endpoint.")
results: list[SearchItem] = Field(default_factory=list, description="Normalized search results from the backend API.")

View File

View File

@@ -0,0 +1,100 @@
"""Utilities for normalising OpenIsle search results."""
from __future__ import annotations
import re
from typing import Any, Iterable
from .models import Highlight, SearchItem, SearchScope
def _truncate(text: str | None, *, limit: int = 240) -> str | None:
"""Compress whitespace and truncate overly long text fragments."""
if not text:
return None
compact = re.sub(r"\s+", " ", text).strip()
if len(compact) <= limit:
return compact
return f"{compact[:limit - 1]}"
def _extract_highlight(data: dict[str, Any]) -> Highlight | None:
highlighted = {
"text": data.get("highlightedText"),
"sub_text": data.get("highlightedSubText"),
"extra": data.get("highlightedExtra"),
}
if any(highlighted.values()):
return Highlight(**highlighted)
return None
def normalise_results(scope: SearchScope, payload: Iterable[dict[str, Any]]) -> list[SearchItem]:
    """Convert backend payloads into :class:`SearchItem` entries.

    Args:
        scope: The search scope that produced ``payload``; selects the
            field mapping applied to each entry.
        payload: Raw JSON objects returned by the backend search API.
            Entries that are not dicts are silently skipped.

    Returns:
        A list of normalised :class:`SearchItem` objects, in input order.
    """
    normalised: list[SearchItem] = []
    for item in payload:
        if not isinstance(item, dict):
            # Defensive: ignore malformed entries rather than failing the
            # whole result set.
            continue
        if scope is SearchScope.GLOBAL:
            # Global results are heterogeneous; the backend tags each entry
            # with a "type" field (the scope name is used as a fallback).
            # NOTE(review): ``item.get("type", scope.value)`` still yields
            # None when the key is present with a null value — confirm the
            # backend never sends that, since ``category`` must be a str.
            normalised.append(
                SearchItem(
                    category=item.get("type", scope.value),
                    title=_truncate(item.get("text")),
                    description=_truncate(item.get("subText")),
                    metadata={
                        "id": item.get("id"),
                        "postId": item.get("postId"),
                        "extra": item.get("extra"),
                    },
                    highlights=_extract_highlight(item),
                )
            )
            continue
        if scope in {SearchScope.POSTS, SearchScope.POSTS_CONTENT, SearchScope.POSTS_TITLE}:
            # Post payloads embed author/category objects; flatten the
            # fields we expose and tolerate their absence.
            author = item.get("author") or {}
            category = item.get("category") or {}
            metadata = {
                "id": item.get("id"),
                "author": author.get("username"),
                "category": category.get("name"),
                "views": item.get("views"),
                "commentCount": item.get("commentCount"),
                "tags": [tag.get("name") for tag in item.get("tags", []) if isinstance(tag, dict)],
            }
            normalised.append(
                SearchItem(
                    category="post",
                    title=_truncate(item.get("title")),
                    description=_truncate(item.get("content")),
                    # Drop None values so metadata stays compact.
                    metadata={k: v for k, v in metadata.items() if v is not None},
                )
            )
            continue
        if scope is SearchScope.USERS:
            metadata = {
                "id": item.get("id"),
                "email": item.get("email"),
                "followers": item.get("followers"),
                "following": item.get("following"),
                "role": item.get("role"),
            }
            normalised.append(
                SearchItem(
                    category="user",
                    title=_truncate(item.get("username")),
                    description=_truncate(item.get("introduction")),
                    metadata={k: v for k, v in metadata.items() if v is not None},
                )
            )
            continue
        # Fallback: include raw entry to aid debugging of unsupported scopes
        normalised.append(SearchItem(category=scope.value, metadata=item))
    return normalised

View File

@@ -1,66 +1,121 @@
"""FastAPI application exposing the MCP server endpoints."""
"""Entry point for the OpenIsle MCP server."""
from __future__ import annotations
import logging
import os
from typing import Annotated
from fastapi import Depends, FastAPI, HTTPException, Query, Request
import httpx
from mcp.server.fastmcp import Context, FastMCP
from mcp.server.fastmcp.logging import configure_logging
from pydantic import Field
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from .client import SearchClient
from .config import get_settings
from .models import SearchResponse, SearchResult
from .client import OpenIsleAPI
from .config import Settings, get_settings
from .models import SearchResponse, SearchScope
from .search import normalise_results
LOGGER = logging.getLogger(__name__)
_logger = logging.getLogger(__name__)
async def _lifespan(app: FastAPI):
settings = get_settings()
client = SearchClient(settings)
app.state.settings = settings
app.state.search_client = client
LOGGER.info(
"Starting MCP server on %s:%s targeting backend %s",
settings.host,
settings.port,
settings.normalized_backend_base_url,
def _create_server(settings: Settings) -> FastMCP:
"""Instantiate the FastMCP server with configured metadata."""
server = FastMCP(
name="OpenIsle MCP",
instructions=(
"Access OpenIsle search functionality. Provide a keyword and optionally a scope to "
"discover users and posts from the community."
),
host=settings.host,
port=settings.port,
transport_security=None,
)
try:
yield
finally:
LOGGER.info("Shutting down MCP server")
await client.close()
@server.custom_route("/health", methods=["GET"])
async def health(_: Request) -> Response: # pragma: no cover - exercised via runtime checks
return JSONResponse({"status": "ok"})
return server
def create_app() -> FastAPI:
"""Create and configure the FastAPI application."""
async def _execute_search(
*,
api: OpenIsleAPI,
scope: SearchScope,
keyword: str,
context: Context | None,
) -> SearchResponse:
message = f"Searching OpenIsle scope={scope.value} keyword={keyword!r}"
if context is not None:
context.info(message)
else:
_logger.info(message)
app = FastAPI(title="OpenIsle MCP Server", lifespan=_lifespan)
payload = await api.search(scope, keyword)
items = normalise_results(scope, payload)
return SearchResponse(scope=scope, keyword=keyword, results=items)
@app.get("/healthz", tags=["health"])
async def healthcheck() -> dict[str, str]:
return {"status": "ok"}
async def get_client(request: Request) -> SearchClient:
return request.app.state.search_client
def build_server(settings: Settings | None = None) -> FastMCP:
"""Configure and return the FastMCP server instance."""
@app.get("/search", response_model=SearchResponse, tags=["search"])
async def search(
keyword: str = Query(..., min_length=1, description="Keyword to search for"),
client: SearchClient = Depends(get_client),
resolved_settings = settings or get_settings()
server = _create_server(resolved_settings)
api_client = OpenIsleAPI(resolved_settings)
@server.tool(
name="openisle_search",
description="Search OpenIsle for users and posts.",
)
async def openisle_search(
keyword: Annotated[str, Field(description="Keyword used to query OpenIsle search.")],
scope: Annotated[
SearchScope,
Field(
description=(
"Scope of the search. Use 'global' to search across users and posts, or specify "
"'users', 'posts', 'posts_title', or 'posts_content' to narrow the results."
)
),
] = SearchScope.GLOBAL,
context: Context | None = None,
) -> SearchResponse:
try:
raw_results = await client.global_search(keyword)
except httpx.HTTPStatusError as exc:
LOGGER.warning("Backend responded with error %s", exc.response.status_code)
raise HTTPException(status_code=exc.response.status_code, detail="Backend error") from exc
except httpx.HTTPError as exc:
LOGGER.error("Failed to reach backend: %s", exc)
raise HTTPException(status_code=503, detail="Search service unavailable") from exc
results = [SearchResult.model_validate(item) for item in raw_results]
return SearchResponse(results=results)
return await _execute_search(api=api_client, scope=scope, keyword=keyword, context=context)
except Exception as exc: # pragma: no cover - surfaced to the MCP runtime
error_message = f"Search failed: {exc}"
if context is not None:
context.error(error_message)
_logger.exception("Search tool failed")
raise
return app
return server
__all__ = ["create_app"]
def main() -> None:
"""CLI entry point used by the console script."""
settings = get_settings()
configure_logging("INFO")
server = build_server(settings)
transport = os.getenv("OPENISLE_MCP_TRANSPORT", settings.transport)
if transport not in {"stdio", "sse", "streamable-http"}:
raise RuntimeError(f"Unsupported transport mode: {transport}")
_logger.info("Starting OpenIsle MCP server on %s:%s via %s", settings.host, settings.port, transport)
if transport == "stdio":
server.run("stdio")
elif transport == "sse":
mount_path = os.getenv("OPENISLE_MCP_SSE_PATH")
server.run("sse", mount_path=mount_path)
else:
server.run("streamable-http")
if __name__ == "__main__": # pragma: no cover - manual execution path
main()