Mirror of https://github.com/nagisa77/OpenIsle.git (synced 2026-02-21 22:41:05 +08:00)

Compare commits: codex/impr... -> codex/crea... (1 commit, dfa7530373)

@@ -213,6 +213,30 @@ services:
       - dev_local_backend
       - prod
 
+  mcp-server:
+    build:
+      context: ..
+      dockerfile: docker/mcp-service.Dockerfile
+    container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
+    env_file:
+      - ${ENV_FILE:-../.env}
+    environment:
+      OPENISLE_API_BASE_URL: ${OPENISLE_API_BASE_URL:-http://springboot:8080}
+      OPENISLE_MCP_HOST: ${OPENISLE_MCP_HOST:-0.0.0.0}
+      OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8000}
+      OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-streamable-http}
+    ports:
+      - "${OPENISLE_MCP_PORT:-8000}:8000"
+    depends_on:
+      springboot:
+        condition: service_healthy
+    networks:
+      - openisle-network
+    profiles:
+      - dev
+      - dev_local_backend
+      - prod
+
   frontend_dev:
     image: node:20
     container_name: ${COMPOSE_PROJECT_NAME}-openisle-frontend-dev

docker/mcp-service.Dockerfile (new file, 20 lines)
@@ -0,0 +1,20 @@
FROM python:3.11-slim AS base

ENV PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1

WORKDIR /app

COPY mcp/pyproject.toml mcp/README.md ./
COPY mcp/src ./src
RUN pip install --upgrade pip \
    && pip install .

EXPOSE 8000

ENV OPENISLE_API_BASE_URL=http://springboot:8080 \
    OPENISLE_MCP_HOST=0.0.0.0 \
    OPENISLE_MCP_PORT=8000 \
    OPENISLE_MCP_TRANSPORT=streamable-http

CMD ["openisle-mcp"]

mcp/.gitignore (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
__pycache__/
*.py[cod]
*.egg-info/
.build/
.venv/
.env

mcp/README.md (new file, 45 lines)
@@ -0,0 +1,45 @@
# OpenIsle MCP Server

This package exposes a [Model Context Protocol](https://github.com/modelcontextprotocol) (MCP) server for OpenIsle.
The initial release focuses on surfacing the platform's search capabilities so that AI assistants can discover
users and posts directly through the existing REST API. Future iterations can expand this service with post
creation and other productivity tools.

## Features

- 🔍 Keyword search across users and posts using the OpenIsle backend APIs
- ✅ Structured MCP tool response for downstream reasoning
- 🩺 Lightweight health check endpoint (`/health`) for container orchestration
- ⚙️ Configurable via environment variables with sensible defaults for Docker Compose

## Running locally

```bash
cd mcp
pip install .
openisle-mcp # starts the MCP server on http://127.0.0.1:8000 by default
```

By default the server targets `http://localhost:8080` for backend requests. Override the target by setting
`OPENISLE_API_BASE_URL` before starting the service.

## Environment variables

| Variable | Default | Description |
| -------- | ------- | ----------- |
| `OPENISLE_API_BASE_URL` | `http://localhost:8080` | Base URL of the OpenIsle backend API |
| `OPENISLE_MCP_HOST` | `127.0.0.1` | Hostname/interface for the MCP HTTP server |
| `OPENISLE_MCP_PORT` | `8000` | Port for the MCP HTTP server |
| `OPENISLE_MCP_TRANSPORT` | `streamable-http` | Transport mode (`stdio`, `sse`, or `streamable-http`) |
| `OPENISLE_MCP_TIMEOUT_SECONDS` | `10` | HTTP timeout when calling the backend |

## Docker

The repository's Docker Compose stack now includes the MCP server. To start it alongside other services:

```bash
cd docker
docker compose --profile dev up mcp-server
```

The service exposes port `8000` by default. Update `OPENISLE_MCP_PORT` to customize the mapped port.
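
For a quick end-to-end check of the search tool described in the README, the following sketch connects to a running instance over the streamable-http transport. It assumes the defaults shown above (port 8000) and the MCP Python SDK's standard `/mcp` mount path; adjust both if your deployment differs.

```python
"""Sketch: call the openisle_search tool over streamable-http (assumes defaults above)."""

import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client


async def main() -> None:
    # Connect to the MCP endpoint exposed by `openisle-mcp` (default /mcp mount path).
    async with streamablehttp_client("http://127.0.0.1:8000/mcp") as (read, write, _):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # Invoke the search tool with a keyword and an optional scope.
            result = await session.call_tool(
                "openisle_search", {"keyword": "welcome", "scope": "users"}
            )
            print(result.content)


asyncio.run(main())
```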

mcp/pyproject.toml (new file, 28 lines)
@@ -0,0 +1,28 @@
[build-system]
requires = ["setuptools>=68", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "openisle-mcp"
version = "0.1.0"
description = "Model Context Protocol server exposing OpenIsle search capabilities."
readme = "README.md"
requires-python = ">=3.11"
authors = [{ name = "OpenIsle" }]
dependencies = [
    "mcp>=1.19.0",
    "httpx>=0.28.1",
    "pydantic>=2.7.0"
]

[project.urls]
Homepage = "https://github.com/openisle/openisle"

[project.scripts]
openisle-mcp = "openisle_mcp.server:main"

[tool.setuptools.packages.find]
where = ["src"]

[tool.setuptools.package-data]
openisle_mcp = ["py.typed"]

mcp/src/openisle_mcp/__init__.py (new file, 14 lines)
@@ -0,0 +1,14 @@
"""OpenIsle MCP server package."""

from .config import Settings, get_settings
from .models import SearchItem, SearchResponse, SearchScope

__all__ = [
    "Settings",
    "get_settings",
    "SearchItem",
    "SearchResponse",
    "SearchScope",
]

__version__ = "0.1.0"

mcp/src/openisle_mcp/client.py (new file, 33 lines)
@@ -0,0 +1,33 @@
"""HTTP client helpers for interacting with the OpenIsle backend APIs."""

from __future__ import annotations

from typing import Any

import httpx

from .config import Settings, get_settings
from .models import SearchScope


class OpenIsleAPI:
    """Thin wrapper around the OpenIsle REST API used by the MCP server."""

    def __init__(self, settings: Settings | None = None) -> None:
        self._settings = settings or get_settings()

    async def search(self, scope: SearchScope, keyword: str) -> list[Any]:
        """Execute a search request against the backend API."""

        url_path = self._settings.get_search_path(scope)
        async with httpx.AsyncClient(
            base_url=str(self._settings.backend_base_url),
            timeout=self._settings.request_timeout_seconds,
        ) as client:
            response = await client.get(url_path, params={"keyword": keyword})
            response.raise_for_status()
            data = response.json()

        if not isinstance(data, list):
            raise RuntimeError("Unexpected search response payload: expected a list")
        return data
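
As a usage illustration, `OpenIsleAPI` can be driven directly from an asyncio script; this sketch assumes a backend is reachable at the configured `OPENISLE_API_BASE_URL`, and the keyword is only an example.

```python
import asyncio

from openisle_mcp.client import OpenIsleAPI
from openisle_mcp.models import SearchScope


async def demo() -> None:
    # Settings (base URL, timeout, search paths) are resolved via get_settings().
    api = OpenIsleAPI()
    posts = await api.search(SearchScope.POSTS, "welcome")
    print(f"received {len(posts)} raw post entries")


asyncio.run(demo())
```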

mcp/src/openisle_mcp/config.py (new file, 83 lines)
@@ -0,0 +1,83 @@
"""Configuration helpers for the OpenIsle MCP server."""

from __future__ import annotations

import os
from functools import lru_cache
from typing import Dict, Literal

from pydantic import AnyHttpUrl, BaseModel, Field, ValidationError

from .models import SearchScope

TransportType = Literal["stdio", "sse", "streamable-http"]


class Settings(BaseModel):
    """Runtime configuration for the MCP server."""

    backend_base_url: AnyHttpUrl = Field(
        default="http://localhost:8080",
        description="Base URL of the OpenIsle backend API.",
    )
    request_timeout_seconds: float = Field(
        default=10.0,
        gt=0,
        description="HTTP timeout when talking to the backend APIs.",
    )
    transport: TransportType = Field(
        default="streamable-http",
        description="Transport mode for the MCP server.",
    )
    host: str = Field(default="127.0.0.1", description="Hostname/interface used by the MCP HTTP server.")
    port: int = Field(default=8000, ge=0, description="Port used by the MCP HTTP server.")
    search_paths: Dict[str, str] = Field(
        default_factory=lambda: {
            SearchScope.GLOBAL.value: "/api/search/global",
            SearchScope.USERS.value: "/api/search/users",
            SearchScope.POSTS.value: "/api/search/posts",
            SearchScope.POSTS_TITLE.value: "/api/search/posts/title",
            SearchScope.POSTS_CONTENT.value: "/api/search/posts/content",
        },
        description="Mapping between search scopes and backend API paths.",
    )

    def get_search_path(self, scope: SearchScope) -> str:
        """Return the backend path associated with a given search scope."""

        try:
            return self.search_paths[scope.value]
        except KeyError as exc:  # pragma: no cover - defensive guard
            raise ValueError(f"Unsupported search scope: {scope}") from exc


@lru_cache(maxsize=1)
def get_settings() -> Settings:
    """Load settings from environment variables with caching."""

    raw_settings: Dict[str, object] = {}

    backend_url = os.getenv("OPENISLE_API_BASE_URL")
    if backend_url:
        raw_settings["backend_base_url"] = backend_url

    timeout = os.getenv("OPENISLE_MCP_TIMEOUT_SECONDS")
    if timeout:
        raw_settings["request_timeout_seconds"] = float(timeout)

    transport = os.getenv("OPENISLE_MCP_TRANSPORT")
    if transport:
        raw_settings["transport"] = transport

    host = os.getenv("OPENISLE_MCP_HOST")
    if host:
        raw_settings["host"] = host

    port = os.getenv("OPENISLE_MCP_PORT")
    if port:
        raw_settings["port"] = int(port)

    try:
        return Settings(**raw_settings)
    except (ValidationError, ValueError) as exc:  # pragma: no cover - configuration errors should surface clearly
        raise RuntimeError(f"Invalid MCP configuration: {exc}") from exc
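
Because `get_settings()` is wrapped in `lru_cache`, environment overrides only take effect if they are exported before the first call; the sketch below illustrates that behaviour with made-up values.

```python
import os

from openisle_mcp.config import get_settings
from openisle_mcp.models import SearchScope

# Overrides must be set before get_settings() is first invoked;
# the resulting Settings object is cached for the process lifetime.
os.environ["OPENISLE_API_BASE_URL"] = "http://localhost:8080"
os.environ["OPENISLE_MCP_PORT"] = "9000"

settings = get_settings()
print(settings.port)                                # 9000
print(settings.get_search_path(SearchScope.USERS))  # /api/search/users
```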

mcp/src/openisle_mcp/models.py (new file, 45 lines)
@@ -0,0 +1,45 @@
"""Data models for the OpenIsle MCP server."""

from __future__ import annotations

from enum import Enum
from typing import Any, Dict, Optional

from pydantic import BaseModel, Field


class SearchScope(str, Enum):
    """Supported search scopes exposed via the MCP tool."""

    GLOBAL = "global"
    USERS = "users"
    POSTS = "posts"
    POSTS_TITLE = "posts_title"
    POSTS_CONTENT = "posts_content"


class Highlight(BaseModel):
    """Highlighted fragments returned by the backend search API."""

    text: Optional[str] = Field(default=None, description="Highlighted main text snippet.")
    sub_text: Optional[str] = Field(default=None, description="Highlighted secondary text snippet.")
    extra: Optional[str] = Field(default=None, description="Additional highlighted data.")


class SearchItem(BaseModel):
    """Normalized representation of a single search result."""

    category: str = Field(description="Type/category of the search result, e.g. user or post.")
    title: Optional[str] = Field(default=None, description="Primary title or label for the result.")
    description: Optional[str] = Field(default=None, description="Supporting description or summary text.")
    url: Optional[str] = Field(default=None, description="Canonical URL that references the resource, if available.")
    metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional structured metadata extracted from the API.")
    highlights: Optional[Highlight] = Field(default=None, description="Highlighted snippets returned by the backend search API.")


class SearchResponse(BaseModel):
    """Structured response returned by the MCP search tool."""

    scope: SearchScope = Field(description="Scope of the search that produced the results.")
    keyword: str = Field(description="Keyword submitted to the backend search endpoint.")
    results: list[SearchItem] = Field(default_factory=list, description="Normalized search results from the backend API.")
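
These models are plain Pydantic v2 classes, so a tool response serialises straight to JSON; a minimal construction example with invented values:

```python
from openisle_mcp.models import SearchItem, SearchResponse, SearchScope

item = SearchItem(category="user", title="alice", metadata={"id": 1})
response = SearchResponse(scope=SearchScope.USERS, keyword="ali", results=[item])

# Pydantic v2 serialisation of the structured tool response.
print(response.model_dump_json(indent=2))
```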

mcp/src/openisle_mcp/py.typed (new empty file, 0 lines)

mcp/src/openisle_mcp/search.py (new file, 100 lines)
@@ -0,0 +1,100 @@
"""Utilities for normalising OpenIsle search results."""

from __future__ import annotations

import re
from typing import Any, Iterable

from .models import Highlight, SearchItem, SearchScope


def _truncate(text: str | None, *, limit: int = 240) -> str | None:
    """Compress whitespace and truncate overly long text fragments."""

    if not text:
        return None
    compact = re.sub(r"\s+", " ", text).strip()
    if len(compact) <= limit:
        return compact
    return f"{compact[:limit - 1]}…"


def _extract_highlight(data: dict[str, Any]) -> Highlight | None:
    highlighted = {
        "text": data.get("highlightedText"),
        "sub_text": data.get("highlightedSubText"),
        "extra": data.get("highlightedExtra"),
    }
    if any(highlighted.values()):
        return Highlight(**highlighted)
    return None


def normalise_results(scope: SearchScope, payload: Iterable[dict[str, Any]]) -> list[SearchItem]:
    """Convert backend payloads into :class:`SearchItem` entries."""

    normalised: list[SearchItem] = []

    for item in payload:
        if not isinstance(item, dict):
            continue

        if scope is SearchScope.GLOBAL:
            normalised.append(
                SearchItem(
                    category=item.get("type", scope.value),
                    title=_truncate(item.get("text")),
                    description=_truncate(item.get("subText")),
                    metadata={
                        "id": item.get("id"),
                        "postId": item.get("postId"),
                        "extra": item.get("extra"),
                    },
                    highlights=_extract_highlight(item),
                )
            )
            continue

        if scope in {SearchScope.POSTS, SearchScope.POSTS_CONTENT, SearchScope.POSTS_TITLE}:
            author = item.get("author") or {}
            category = item.get("category") or {}
            metadata = {
                "id": item.get("id"),
                "author": author.get("username"),
                "category": category.get("name"),
                "views": item.get("views"),
                "commentCount": item.get("commentCount"),
                "tags": [tag.get("name") for tag in item.get("tags", []) if isinstance(tag, dict)],
            }
            normalised.append(
                SearchItem(
                    category="post",
                    title=_truncate(item.get("title")),
                    description=_truncate(item.get("content")),
                    metadata={k: v for k, v in metadata.items() if v is not None},
                )
            )
            continue

        if scope is SearchScope.USERS:
            metadata = {
                "id": item.get("id"),
                "email": item.get("email"),
                "followers": item.get("followers"),
                "following": item.get("following"),
                "role": item.get("role"),
            }
            normalised.append(
                SearchItem(
                    category="user",
                    title=_truncate(item.get("username")),
                    description=_truncate(item.get("introduction")),
                    metadata={k: v for k, v in metadata.items() if v is not None},
                )
            )
            continue

        # Fallback: include raw entry to aid debugging of unsupported scopes
        normalised.append(SearchItem(category=scope.value, metadata=item))

    return normalised
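
A small sketch of `normalise_results` on a hand-written payload (the field names follow the users branch above; the values are invented):

```python
from openisle_mcp.models import SearchScope
from openisle_mcp.search import normalise_results

payload = [
    {"id": 7, "username": "alice", "introduction": "Hi, I build islands.", "followers": 12},
    "not-a-dict",  # non-dict entries are skipped by the isinstance guard
]

items = normalise_results(SearchScope.USERS, payload)
print(items[0].category, items[0].title)  # user alice
print(items[0].metadata)                  # {'id': 7, 'followers': 12}
```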

mcp/src/openisle_mcp/server.py (new file, 121 lines)
@@ -0,0 +1,121 @@
"""Entry point for the OpenIsle MCP server."""

from __future__ import annotations

import logging
import os
from typing import Annotated

from mcp.server.fastmcp import Context, FastMCP
from mcp.server.fastmcp.logging import configure_logging
from pydantic import Field
from starlette.requests import Request
from starlette.responses import JSONResponse, Response

from .client import OpenIsleAPI
from .config import Settings, get_settings
from .models import SearchResponse, SearchScope
from .search import normalise_results

_logger = logging.getLogger(__name__)


def _create_server(settings: Settings) -> FastMCP:
    """Instantiate the FastMCP server with configured metadata."""

    server = FastMCP(
        name="OpenIsle MCP",
        instructions=(
            "Access OpenIsle search functionality. Provide a keyword and optionally a scope to "
            "discover users and posts from the community."
        ),
        host=settings.host,
        port=settings.port,
        transport_security=None,
    )

    @server.custom_route("/health", methods=["GET"])
    async def health(_: Request) -> Response:  # pragma: no cover - exercised via runtime checks
        return JSONResponse({"status": "ok"})

    return server


async def _execute_search(
    *,
    api: OpenIsleAPI,
    scope: SearchScope,
    keyword: str,
    context: Context | None,
) -> SearchResponse:
    message = f"Searching OpenIsle scope={scope.value} keyword={keyword!r}"
    if context is not None:
        context.info(message)
    else:
        _logger.info(message)

    payload = await api.search(scope, keyword)
    items = normalise_results(scope, payload)
    return SearchResponse(scope=scope, keyword=keyword, results=items)


def build_server(settings: Settings | None = None) -> FastMCP:
    """Configure and return the FastMCP server instance."""

    resolved_settings = settings or get_settings()
    server = _create_server(resolved_settings)
    api_client = OpenIsleAPI(resolved_settings)

    @server.tool(
        name="openisle_search",
        description="Search OpenIsle for users and posts.",
    )
    async def openisle_search(
        keyword: Annotated[str, Field(description="Keyword used to query OpenIsle search.")],
        scope: Annotated[
            SearchScope,
            Field(
                description=(
                    "Scope of the search. Use 'global' to search across users and posts, or specify "
                    "'users', 'posts', 'posts_title', or 'posts_content' to narrow the results."
                )
            ),
        ] = SearchScope.GLOBAL,
        context: Context | None = None,
    ) -> SearchResponse:
        try:
            return await _execute_search(api=api_client, scope=scope, keyword=keyword, context=context)
        except Exception as exc:  # pragma: no cover - surfaced to the MCP runtime
            error_message = f"Search failed: {exc}"
            if context is not None:
                context.error(error_message)
            _logger.exception("Search tool failed")
            raise

    return server


def main() -> None:
    """CLI entry point used by the console script."""

    settings = get_settings()
    configure_logging("INFO")
    server = build_server(settings)

    transport = os.getenv("OPENISLE_MCP_TRANSPORT", settings.transport)
    if transport not in {"stdio", "sse", "streamable-http"}:
        raise RuntimeError(f"Unsupported transport mode: {transport}")

    _logger.info("Starting OpenIsle MCP server on %s:%s via %s", settings.host, settings.port, transport)

    if transport == "stdio":
        server.run("stdio")
    elif transport == "sse":
        mount_path = os.getenv("OPENISLE_MCP_SSE_PATH")
        server.run("sse", mount_path=mount_path)
    else:
        server.run("streamable-http")


if __name__ == "__main__":  # pragma: no cover - manual execution path
    main()
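
For tests or embedding, `build_server` also accepts an explicit `Settings` instance instead of reading the environment; a sketch, assuming the stdio transport is wanted:

```python
from openisle_mcp.config import Settings
from openisle_mcp.server import build_server

# Construct settings in code (example values) and serve over stdio,
# bypassing the cached environment-driven get_settings() path.
settings = Settings(backend_base_url="http://localhost:8080", transport="stdio")
server = build_server(settings)
server.run("stdio")
```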