Compare commits

1 Commit

Author: Tim
SHA1: 28efd376b6
Message: feat: add MCP search service
Date: 2025-10-24 17:06:13 +08:00
15 changed files with 352 additions and 295 deletions

View File

@@ -2,15 +2,10 @@
SERVER_PORT=8080
FRONTEND_PORT=3000
WEBSOCKET_PORT=8082
MCP_PORT=9090
MYSQL_PORT=3306
REDIS_PORT=6379
RABBITMQ_PORT=5672
RABBITMQ_MANAGEMENT_PORT=15672
MCP_HOST=0.0.0.0
MCP_BACKEND_BASE_URL=http://springboot:8080
MCP_CONNECT_TIMEOUT=5
MCP_READ_TIMEOUT=10
# === OpenSearch Configuration ===
OPENSEARCH_PORT=9200

.gitignore vendored
View File

@@ -17,6 +17,7 @@ dist
# misc
.DS_Store
__pycache__/
*.pem
npm-debug.log*
yarn-debug.log*

View File

@@ -40,12 +40,12 @@ echo "👉 Build images ..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=production \
frontend_service mcp-service
frontend_service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
mysql redis rabbitmq websocket-service springboot frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -39,12 +39,12 @@ echo "👉 Build images (staging)..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=staging \
frontend_service mcp-service
frontend_service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
mysql redis rabbitmq websocket-service springboot frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -178,38 +178,6 @@ services:
- dev
- prod
mcp-service:
build:
context: ..
dockerfile: mcp/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
env_file:
- ${ENV_FILE:-../.env}
environment:
MCP_HOST: ${MCP_HOST:-0.0.0.0}
MCP_PORT: ${MCP_PORT:-9090}
MCP_BACKEND_BASE_URL: ${MCP_BACKEND_BASE_URL:-http://springboot:8080}
MCP_CONNECT_TIMEOUT: ${MCP_CONNECT_TIMEOUT:-5}
MCP_READ_TIMEOUT: ${MCP_READ_TIMEOUT:-10}
ports:
- "${MCP_PORT:-9090}:${MCP_PORT:-9090}"
depends_on:
springboot:
condition: service_healthy
command: ["openisle-mcp"]
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://127.0.0.1:${MCP_PORT:-9090}/healthz || exit 1"]
interval: 10s
timeout: 5s
retries: 30
start_period: 20s
restart: unless-stopped
networks:
- openisle-network
profiles:
- dev
- prod
websocket-service:
image: maven:3.9-eclipse-temurin-17
container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket
@@ -245,6 +213,32 @@ services:
- dev_local_backend
- prod
mcp-service:
build:
context: ..
dockerfile: mcp/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
env_file:
- ${ENV_FILE:-../.env}
environment:
FASTMCP_HOST: 0.0.0.0
FASTMCP_PORT: ${MCP_PORT:-8765}
OPENISLE_BACKEND_URL: ${OPENISLE_BACKEND_URL:-http://springboot:8080}
OPENISLE_BACKEND_TIMEOUT: ${OPENISLE_BACKEND_TIMEOUT:-10}
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-sse}
OPENISLE_MCP_SSE_MOUNT_PATH: ${OPENISLE_MCP_SSE_MOUNT_PATH:-/mcp}
ports:
- "${MCP_PORT:-8765}:${MCP_PORT:-8765}"
depends_on:
springboot:
condition: service_healthy
restart: unless-stopped
networks:
- openisle-network
profiles:
- dev
- prod
frontend_dev:
image: node:20
container_name: ${COMPOSE_PROJECT_NAME}-openisle-frontend-dev
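
For orientation, here is a small, illustrative Python sketch of how the new `environment:` block for `mcp-service` is expected to be consumed. The `OPENISLE_*` values are read by `openisle_mcp.server` (see the server diff further down); treating `FASTMCP_HOST`/`FASTMCP_PORT` as settings read by the FastMCP runtime is an assumption based on its `FASTMCP_`-prefixed configuration.

```python
# Illustrative only: how the compose environment above maps onto what the server reads.
import os

backend_url = os.getenv("OPENISLE_BACKEND_URL", "http://springboot:8080")
timeout_s = float(os.getenv("OPENISLE_BACKEND_TIMEOUT", "10"))
transport = os.getenv("OPENISLE_MCP_TRANSPORT", "sse")        # compose default: sse
mount_path = os.getenv("OPENISLE_MCP_SSE_MOUNT_PATH", "/mcp")
host = os.getenv("FASTMCP_HOST", "127.0.0.1")                 # 0.0.0.0 inside the container
port = int(os.getenv("FASTMCP_PORT", "8000"))                 # ${MCP_PORT:-8765} inside the container

print(f"MCP server binds {host}:{port} via {transport}, backend={backend_url}, timeout={timeout_s}s")
```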

View File

@@ -1,21 +1,17 @@
FROM python:3.11-slim
FROM python:3.11-slim AS runtime
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
ENV PYTHONUNBUFFERED=1 \
PIP_NO_CACHE_DIR=1
WORKDIR /app
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl \
&& rm -rf /var/lib/apt/lists/*
COPY mcp/pyproject.toml /app/pyproject.toml
COPY mcp/README.md /app/README.md
COPY mcp/src /app/src
COPY mcp/pyproject.toml ./pyproject.toml
COPY mcp/README.md ./README.md
COPY mcp/src ./src
RUN pip install --upgrade pip \
&& pip install .
RUN pip install --no-cache-dir --upgrade pip \
&& pip install --no-cache-dir .
EXPOSE 9090
EXPOSE 8765
CMD ["openisle-mcp"]

View File

@@ -1,34 +1,39 @@
# OpenIsle MCP Service
# OpenIsle MCP Server
This package hosts a lightweight Python service that exposes OpenIsle search
capabilities through a Model Context Protocol (MCP) compatible HTTP interface.
It currently forwards search requests to the main Spring Boot backend and
returns the aggregated results. The service is intentionally simple so we can
iterate quickly and extend it with additional tools (for example, post
creation) in future updates.
This package provides a [Model Context Protocol](https://github.com/modelcontextprotocol) (MCP) server that exposes the OpenIsle
search capabilities to AI assistants. The server wraps the existing Spring Boot backend and currently provides a single `search`
tool. Future iterations can extend the server with additional functionality such as publishing new posts or moderating content.
## Features
- 🔍 **Global search** — delegates to the existing `/api/search/global` endpoint exposed by the OpenIsle backend.
- 🧠 **Structured results** — responses include highlights and deep links so AI clients can present the results cleanly.
- ⚙️ **Configurable** — point the server at any reachable OpenIsle backend by setting environment variables.
## Local development
```bash
pip install -e ./mcp
openisle-mcp
cd mcp
python -m venv .venv
source .venv/bin/activate
pip install -e .
openisle-mcp --transport stdio # or "sse"/"streamable-http"
```
By default the server listens on port `9090` and expects the Spring Boot backend
at `http://localhost:8080`. Configure the behaviour with the following
environment variables:
Environment variables:
- `MCP_PORT` HTTP port the MCP service should listen on (default: `9090`).
- `MCP_HOST` Bind host for the HTTP server (default: `0.0.0.0`).
- `MCP_BACKEND_BASE_URL` Base URL of the Spring Boot backend that provides the
search endpoints (default: `http://springboot:8080`).
- `MCP_CONNECT_TIMEOUT` Connection timeout (seconds) when calling the backend
(default: `5`).
- `MCP_READ_TIMEOUT` Read timeout (seconds) when calling the backend (default:
`10`).
| Variable | Description | Default |
| --- | --- | --- |
| `OPENISLE_BACKEND_URL` | Base URL of the Spring Boot backend | `http://springboot:8080` |
| `OPENISLE_BACKEND_TIMEOUT` | Timeout (seconds) for backend HTTP calls | `10` |
| `OPENISLE_PUBLIC_BASE_URL` | Optional base URL used to build deep links in search results | *(unset)* |
| `OPENISLE_MCP_TRANSPORT` | MCP transport (`stdio`, `sse`, `streamable-http`) | `stdio` |
| `OPENISLE_MCP_SSE_MOUNT_PATH` | Mount path when using SSE transport | `/mcp` |
| `FASTMCP_HOST` | Host for SSE / HTTP transports | `127.0.0.1` |
| `FASTMCP_PORT` | Port for SSE / HTTP transports | `8000` |
## Docker
The repository contains a Dockerfile that builds a slim Python image running the
service with `uvicorn`. The compose configuration wires the container into the
existing OpenIsle stack so that deployments automatically start the MCP service.
A dedicated Docker image is provided and wired into `docker-compose.yaml`. The container listens on
`${MCP_PORT:-8765}` and connects to the backend service running in the same compose stack.
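
For reference, a minimal client sketch (not part of the commit) that exercises the `search` tool over the stdio transport with the `mcp` Python SDK; the keyword and limit values are placeholders.

```python
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def main() -> None:
    # Launch the server the same way as the local development instructions above.
    params = StdioServerParameters(command="openisle-mcp", args=["--transport", "stdio"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # "openisle" and limit=5 are placeholder arguments; the server caps limit at 50.
            result = await session.call_tool("search", {"keyword": "openisle", "limit": 5})
            print(result.content)


asyncio.run(main())
```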

View File

@@ -1,25 +1,29 @@
[build-system]
requires = ["hatchling>=1.21.0"]
build-backend = "hatchling.build"
requires = ["setuptools>=68", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "openisle-mcp"
version = "0.1.0"
description = "Model Context Protocol server exposing OpenIsle search capabilities"
readme = "README.md"
authors = [
{ name = "OpenIsle" }
]
authors = [{name = "OpenIsle Team"}]
license = {text = "MIT"}
requires-python = ">=3.11"
dependencies = [
"fastapi>=0.111.0,<1.0.0",
"uvicorn[standard]>=0.29.0,<0.31.0",
"httpx>=0.27.0,<0.28.0",
"pydantic>=2.7.0,<3.0.0"
"mcp>=1.19.0",
"httpx>=0.28.0",
"pydantic>=2.12.0",
]
[project.scripts]
openisle-mcp = "openisle_mcp.__main__:main"
openisle-mcp = "openisle_mcp.server:main"
[tool.hatch.build.targets.wheel]
packages = ["src/openisle_mcp"]
[tool.setuptools]
package-dir = {"" = "src"}
[tool.setuptools.packages.find]
where = ["src"]
[tool.setuptools.package-data]
openisle_mcp = ["py.typed"]

View File

@@ -1,6 +1,10 @@
"""OpenIsle MCP service package."""
"""OpenIsle MCP server package."""
from .config import Settings, get_settings
from .server import create_app
from importlib import metadata
__all__ = ["Settings", "get_settings", "create_app"]
try:
__version__ = metadata.version("openisle-mcp")
except metadata.PackageNotFoundError: # pragma: no cover - best effort during dev
__version__ = "0.0.0"
__all__ = ["__version__"]
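
Once the package is installed, the metadata lookup above can be checked with a one-liner (illustrative):

```python
import openisle_mcp

# Prints the installed version, or "0.0.0" when package metadata is unavailable.
print(openisle_mcp.__version__)
```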

View File

@@ -1,24 +0,0 @@
"""Entrypoint for running the MCP service with ``python -m``."""
from __future__ import annotations
import logging
import uvicorn
from .config import get_settings
def main() -> None:
settings = get_settings()
logging.basicConfig(level=logging.INFO)
uvicorn.run(
"openisle_mcp.server:create_app",
host=settings.host,
port=settings.port,
factory=True,
)
if __name__ == "__main__": # pragma: no cover
main()

View File

@@ -1,44 +1,79 @@
"""HTTP client helpers for talking to the Spring Boot backend."""
"""HTTP client for talking to the OpenIsle backend."""
from __future__ import annotations
import json
import logging
from typing import Any
from typing import List
import httpx
from pydantic import ValidationError
from .config import Settings
from .models import BackendSearchResult
LOGGER = logging.getLogger(__name__)
__all__ = ["BackendClientError", "OpenIsleBackendClient"]
logger = logging.getLogger(__name__)
class SearchClient:
"""Wrapper around :class:`httpx.AsyncClient` for search operations."""
def __init__(self, settings: Settings):
timeout = httpx.Timeout(
connect=settings.connect_timeout,
read=settings.read_timeout,
write=settings.read_timeout,
pool=None,
)
self._client = httpx.AsyncClient(
base_url=settings.normalized_backend_base_url,
timeout=timeout,
)
async def close(self) -> None:
await self._client.aclose()
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
LOGGER.debug("Performing global search for keyword '%s'", keyword)
response = await self._client.get("/api/search/global", params={"keyword": keyword})
response.raise_for_status()
payload = response.json()
if isinstance(payload, list):
return payload
LOGGER.warning("Unexpected payload type from backend: %s", type(payload))
return []
class BackendClientError(RuntimeError):
"""Raised when the backend cannot fulfil a request."""
__all__ = ["SearchClient"]
class OpenIsleBackendClient:
"""Tiny wrapper around the Spring Boot search endpoints."""
def __init__(self, base_url: str, timeout: float = 10.0) -> None:
if not base_url:
raise ValueError("base_url must not be empty")
self._base_url = base_url.rstrip("/")
timeout = timeout if timeout > 0 else 10.0
self._timeout = httpx.Timeout(timeout, connect=timeout, read=timeout)
@property
def base_url(self) -> str:
return self._base_url
async def search_global(self, keyword: str) -> List[BackendSearchResult]:
"""Call `/api/search/global` and normalise the payload."""
url = f"{self._base_url}/api/search/global"
params = {"keyword": keyword}
headers = {"Accept": "application/json"}
logger.debug("Calling OpenIsle backend", extra={"url": url, "params": params})
try:
async with httpx.AsyncClient(timeout=self._timeout, headers=headers, follow_redirects=True) as client:
response = await client.get(url, params=params)
response.raise_for_status()
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors are rare in tests
body_preview = _truncate_body(exc.response.text)
raise BackendClientError(
f"Backend returned HTTP {exc.response.status_code}: {body_preview}"
) from exc
except httpx.RequestError as exc: # pragma: no cover - network errors are rare in tests
raise BackendClientError(f"Failed to reach backend: {exc}") from exc
try:
payload = response.json()
except json.JSONDecodeError as exc:
raise BackendClientError("Backend returned invalid JSON") from exc
if not isinstance(payload, list):
raise BackendClientError("Unexpected search payload type; expected a list")
results: list[BackendSearchResult] = []
for item in payload:
try:
results.append(BackendSearchResult.model_validate(item))
except ValidationError as exc:
raise BackendClientError(f"Invalid search result payload: {exc}") from exc
return results
def _truncate_body(body: str, limit: int = 200) -> str:
body = body.strip()
if len(body) <= limit:
return body
return f"{body[:limit]}"

View File

@@ -1,71 +0,0 @@
"""Configuration helpers for the MCP service."""
from __future__ import annotations
import os
from functools import lru_cache
from typing import Any
from pydantic import BaseModel, ConfigDict, Field, ValidationError
class Settings(BaseModel):
"""Application settings sourced from environment variables."""
host: str = Field(default="0.0.0.0", description="Host to bind the HTTP server to")
port: int = Field(default=9090, ge=1, le=65535, description="Port exposed by the MCP server")
backend_base_url: str = Field(
default="http://springboot:8080",
description="Base URL of the Spring Boot backend that provides search endpoints",
)
connect_timeout: float = Field(
default=5.0,
ge=0.0,
description="Connection timeout when communicating with the backend (seconds)",
)
read_timeout: float = Field(
default=10.0,
ge=0.0,
description="Read timeout when communicating with the backend (seconds)",
)
model_config = ConfigDict(extra="ignore")
@property
def normalized_backend_base_url(self) -> str:
"""Return the backend base URL without a trailing slash."""
return self.backend_base_url.rstrip("/")
ENV_MAPPING: dict[str, str] = {
"host": "MCP_HOST",
"port": "MCP_PORT",
"backend_base_url": "MCP_BACKEND_BASE_URL",
"connect_timeout": "MCP_CONNECT_TIMEOUT",
"read_timeout": "MCP_READ_TIMEOUT",
}
def _load_environment_values() -> dict[str, Any]:
values: dict[str, Any] = {}
for field, env_name in ENV_MAPPING.items():
value = os.getenv(env_name)
if value is None:
continue
values[field] = value
return values
@lru_cache(maxsize=1)
def get_settings() -> Settings:
"""Load and validate application settings."""
values = _load_environment_values()
try:
return Settings(**values)
except ValidationError as exc: # pragma: no cover - defensive branch
raise RuntimeError("Invalid MCP configuration") from exc
__all__ = ["Settings", "get_settings"]

View File

@@ -1,38 +1,58 @@
"""Pydantic models shared across the MCP service."""
"""Pydantic models used by the OpenIsle MCP server."""
from __future__ import annotations
from typing import Optional
from typing import Dict, Optional
from pydantic import BaseModel, ConfigDict, Field
__all__ = [
"BackendSearchResult",
"SearchResult",
"SearchResponse",
]
class BackendSearchResult(BaseModel):
"""Shape of the payload returned by the OpenIsle backend."""
type: str
id: Optional[int] = None
text: Optional[str] = None
sub_text: Optional[str] = Field(default=None, alias="subText")
extra: Optional[str] = None
post_id: Optional[int] = Field(default=None, alias="postId")
highlighted_text: Optional[str] = Field(default=None, alias="highlightedText")
highlighted_sub_text: Optional[str] = Field(default=None, alias="highlightedSubText")
highlighted_extra: Optional[str] = Field(default=None, alias="highlightedExtra")
model_config = ConfigDict(populate_by_name=True, extra="ignore")
class SearchResult(BaseModel):
"""Representation of a single search result entry."""
"""Structured search result returned to MCP clients."""
model_config = ConfigDict(extra="ignore")
type: str = Field(description="Entity type, e.g. post, comment, user")
id: Optional[int] = Field(default=None, description="Primary identifier for the entity")
title: Optional[str] = Field(default=None, description="Primary text to display")
subtitle: Optional[str] = Field(default=None, description="Secondary text (e.g. author or category)")
extra: Optional[str] = Field(default=None, description="Additional descriptive snippet")
post_id: Optional[int] = Field(default=None, description="Associated post id for comment results")
url: Optional[str] = Field(default=None, description="Deep link to the resource inside OpenIsle")
highlights: Dict[str, Optional[str]] = Field(
default_factory=dict,
description="Highlighted HTML fragments keyed by field name",
)
type: Optional[str] = Field(default=None, description="Type of the result entry")
id: Optional[int] = Field(default=None, description="Identifier of the result entry")
text: Optional[str] = Field(default=None, description="Primary text of the result entry")
subText: Optional[str] = Field(default=None, description="Secondary text associated with the result")
extra: Optional[str] = Field(default=None, description="Additional information about the result")
postId: Optional[int] = Field(default=None, description="Related post identifier, if applicable")
highlightedText: Optional[str] = Field(default=None, description="Highlighted primary text segment")
highlightedSubText: Optional[str] = Field(
default=None,
description="Highlighted secondary text segment",
)
highlightedExtra: Optional[str] = Field(
default=None,
description="Highlighted additional information",
)
model_config = ConfigDict(populate_by_name=True)
class SearchResponse(BaseModel):
"""Response payload returned by the search endpoint."""
"""Response envelope returned from the MCP search tool."""
results: list[SearchResult] = Field(default_factory=list)
keyword: str = Field(description="Sanitised keyword that was searched for")
total_results: int = Field(description="Total number of results returned by the backend")
limit: int = Field(description="Maximum number of results included in the response")
results: list[SearchResult] = Field(default_factory=list, description="Search results up to the requested limit")
__all__ = ["SearchResult", "SearchResponse"]
model_config = ConfigDict(populate_by_name=True)
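
To illustrate how a backend payload flows into the MCP-facing models, a short sketch with made-up field values; the conversion mirrors `_to_search_result` in the server module below.

```python
from openisle_mcp.models import BackendSearchResult, SearchResponse, SearchResult

# Hypothetical payload from /api/search/global; camelCase keys match the aliases above.
payload = {
    "type": "comment",
    "id": 42,
    "text": "Welcome to OpenIsle",
    "subText": "by Tim",
    "postId": 7,
    "highlightedText": "<em>OpenIsle</em>",
}

backend = BackendSearchResult.model_validate(payload)  # aliases -> snake_case fields
result = SearchResult(
    type=backend.type,
    id=backend.id,
    title=backend.text,
    subtitle=backend.sub_text,
    post_id=backend.post_id,
    highlights={"text": backend.highlighted_text},
)
response = SearchResponse(keyword="OpenIsle", total_results=1, limit=20, results=[result])
print(response.model_dump())
```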

View File

View File

@@ -1,66 +1,164 @@
"""FastAPI application exposing the MCP server endpoints."""
"""Entry point for the OpenIsle MCP server."""
from __future__ import annotations
import argparse
import logging
import os
from typing import Annotated, Optional
from fastapi import Depends, FastAPI, HTTPException, Query, Request
import httpx
from mcp.server.fastmcp import Context, FastMCP
from mcp.server.fastmcp import exceptions as mcp_exceptions
from pydantic import Field
from .client import SearchClient
from .config import get_settings
from .models import SearchResponse, SearchResult
from .client import BackendClientError, OpenIsleBackendClient
from .models import BackendSearchResult, SearchResponse, SearchResult
LOGGER = logging.getLogger(__name__)
logger = logging.getLogger(__name__)
APP_NAME = "openisle-mcp"
DEFAULT_BACKEND_URL = "http://springboot:8080"
DEFAULT_TRANSPORT = "stdio"
DEFAULT_TIMEOUT = 10.0
DEFAULT_LIMIT = 20
MAX_LIMIT = 50
server = FastMCP(
APP_NAME,
instructions=(
"Use the `search` tool to query OpenIsle content. "
"Results include posts, comments, users, categories, and tags."
),
)
async def _lifespan(app: FastAPI):
settings = get_settings()
client = SearchClient(settings)
app.state.settings = settings
app.state.search_client = client
LOGGER.info(
"Starting MCP server on %s:%s targeting backend %s",
settings.host,
settings.port,
settings.normalized_backend_base_url,
)
def _env(name: str, default: Optional[str] = None) -> Optional[str]:
value = os.getenv(name, default)
if value is None:
return None
trimmed = value.strip()
return trimmed or default
def _load_timeout() -> float:
raw = _env("OPENISLE_BACKEND_TIMEOUT", str(DEFAULT_TIMEOUT))
try:
yield
finally:
LOGGER.info("Shutting down MCP server")
await client.close()
timeout = float(raw) if raw is not None else DEFAULT_TIMEOUT
except ValueError:
logger.warning("Invalid OPENISLE_BACKEND_TIMEOUT value '%s', falling back to %s", raw, DEFAULT_TIMEOUT)
return DEFAULT_TIMEOUT
if timeout <= 0:
logger.warning("Non-positive OPENISLE_BACKEND_TIMEOUT %s, falling back to %s", timeout, DEFAULT_TIMEOUT)
return DEFAULT_TIMEOUT
return timeout
def create_app() -> FastAPI:
"""Create and configure the FastAPI application."""
app = FastAPI(title="OpenIsle MCP Server", lifespan=_lifespan)
@app.get("/healthz", tags=["health"])
async def healthcheck() -> dict[str, str]:
return {"status": "ok"}
async def get_client(request: Request) -> SearchClient:
return request.app.state.search_client
@app.get("/search", response_model=SearchResponse, tags=["search"])
async def search(
keyword: str = Query(..., min_length=1, description="Keyword to search for"),
client: SearchClient = Depends(get_client),
) -> SearchResponse:
try:
raw_results = await client.global_search(keyword)
except httpx.HTTPStatusError as exc:
LOGGER.warning("Backend responded with error %s", exc.response.status_code)
raise HTTPException(status_code=exc.response.status_code, detail="Backend error") from exc
except httpx.HTTPError as exc:
LOGGER.error("Failed to reach backend: %s", exc)
raise HTTPException(status_code=503, detail="Search service unavailable") from exc
results = [SearchResult.model_validate(item) for item in raw_results]
return SearchResponse(results=results)
return app
_BACKEND_CLIENT = OpenIsleBackendClient(
base_url=_env("OPENISLE_BACKEND_URL", DEFAULT_BACKEND_URL) or DEFAULT_BACKEND_URL,
timeout=_load_timeout(),
)
_PUBLIC_BASE_URL = _env("OPENISLE_PUBLIC_BASE_URL")
__all__ = ["create_app"]
def _build_url(result: BackendSearchResult) -> Optional[str]:
if not _PUBLIC_BASE_URL:
return None
base = _PUBLIC_BASE_URL.rstrip("/")
if result.type in {"post", "post_title"} and result.id is not None:
return f"{base}/posts/{result.id}"
if result.type == "comment" and result.post_id is not None:
anchor = f"#comment-{result.id}" if result.id is not None else ""
return f"{base}/posts/{result.post_id}{anchor}"
if result.type == "user" and result.id is not None:
return f"{base}/users/{result.id}"
if result.type == "category" and result.id is not None:
return f"{base}/?categoryId={result.id}"
if result.type == "tag" and result.id is not None:
return f"{base}/?tagIds={result.id}"
return None
def _to_search_result(result: BackendSearchResult) -> SearchResult:
highlights = {
"text": result.highlighted_text,
"subText": result.highlighted_sub_text,
"extra": result.highlighted_extra,
}
# Remove empty highlight entries to keep the payload clean
highlights = {key: value for key, value in highlights.items() if value}
return SearchResult(
type=result.type,
id=result.id,
title=result.text,
subtitle=result.sub_text,
extra=result.extra,
post_id=result.post_id,
url=_build_url(result),
highlights=highlights,
)
KeywordParam = Annotated[str, Field(description="Keyword to search for", min_length=1)]
LimitParam = Annotated[
int,
Field(ge=1, le=MAX_LIMIT, description=f"Maximum number of results to return (<= {MAX_LIMIT})"),
]
@server.tool(name="search", description="Search OpenIsle content")
async def search(keyword: KeywordParam, limit: LimitParam = DEFAULT_LIMIT, ctx: Optional[Context] = None) -> SearchResponse:
"""Run a search query against the OpenIsle backend."""
trimmed = keyword.strip()
if not trimmed:
raise mcp_exceptions.ToolError("Keyword must not be empty")
if ctx is not None:
await ctx.debug(f"Searching OpenIsle for '{trimmed}' (limit={limit})")
try:
raw_results = await _BACKEND_CLIENT.search_global(trimmed)
except BackendClientError as exc:
if ctx is not None:
await ctx.error(f"Search request failed: {exc}")
raise mcp_exceptions.ToolError(f"Search failed: {exc}") from exc
results = [_to_search_result(result) for result in raw_results]
limited = results[:limit]
if ctx is not None:
await ctx.info(
"Search completed",
keyword=trimmed,
total_results=len(results),
returned=len(limited),
)
return SearchResponse(keyword=trimmed, total_results=len(results), limit=limit, results=limited)
def main() -> None:
parser = argparse.ArgumentParser(description="Run the OpenIsle MCP server")
parser.add_argument(
"--transport",
choices=["stdio", "sse", "streamable-http"],
default=_env("OPENISLE_MCP_TRANSPORT", DEFAULT_TRANSPORT),
help="Transport protocol to use",
)
parser.add_argument(
"--mount-path",
default=_env("OPENISLE_MCP_SSE_MOUNT_PATH", "/mcp"),
help="Mount path when using the SSE transport",
)
args = parser.parse_args()
logging.basicConfig(level=os.getenv("OPENISLE_MCP_LOG_LEVEL", "INFO"))
logger.info(
"Starting OpenIsle MCP server", extra={"transport": args.transport, "backend": _BACKEND_CLIENT.base_url}
)
server.run(transport=args.transport, mount_path=args.mount_path)
if __name__ == "__main__":
main()
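
For clarity, a short sketch of the deep-link building and result conversion added above. It calls the module-level helpers directly; `https://openisle.example` is a placeholder, and the environment variable must be set before the module is imported because `_PUBLIC_BASE_URL` is read at import time.

```python
import os

os.environ["OPENISLE_PUBLIC_BASE_URL"] = "https://openisle.example"  # placeholder base URL

from openisle_mcp import server as mcp_server  # imported after env setup on purpose
from openisle_mcp.models import BackendSearchResult

# Hypothetical comment hit as returned by /api/search/global.
hit = BackendSearchResult(type="comment", id=3, postId=7, highlightedText="<em>isle</em>")

print(mcp_server._build_url(hit))                    # https://openisle.example/posts/7#comment-3
print(mcp_server._to_search_result(hit).highlights)  # {'text': '<em>isle</em>'}
```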