Compare commits


1 Commit

Author SHA1 Message Date
Tim 28efd376b6 feat: add MCP search service 2025-10-24 17:06:13 +08:00
26 changed files with 414 additions and 950 deletions

View File

@@ -2,7 +2,6 @@
SERVER_PORT=8080
FRONTEND_PORT=3000
WEBSOCKET_PORT=8082
OPENISLE_MCP_PORT=8085
MYSQL_PORT=3306
REDIS_PORT=6379
RABBITMQ_PORT=5672

.gitignore (vendored), 1 line changed
View File

@@ -17,6 +17,7 @@ dist
# misc
.DS_Store
__pycache__/
*.pem
npm-debug.log*
yarn-debug.log*

View File

@@ -1,13 +1,11 @@
package com.openisle.controller;
import com.openisle.dto.CommentContextDto;
import com.openisle.dto.CommentDto;
import com.openisle.dto.CommentRequest;
import com.openisle.dto.PostChangeLogDto;
import com.openisle.dto.TimelineItemDto;
import com.openisle.mapper.CommentMapper;
import com.openisle.mapper.PostChangeLogMapper;
import com.openisle.mapper.PostMapper;
import com.openisle.model.Comment;
import com.openisle.model.CommentSort;
import com.openisle.service.*;
@@ -42,7 +40,6 @@ public class CommentController {
private final PointService pointService;
private final PostChangeLogService changeLogService;
private final PostChangeLogMapper postChangeLogMapper;
private final PostMapper postMapper;
@Value("${app.captcha.enabled:false}")
private boolean captchaEnabled;
@@ -187,37 +184,6 @@ public class CommentController {
return itemDtoList;
}
@GetMapping("/comments/{commentId}/context")
@Operation(
summary = "Comment context",
description = "Get a comment along with its previous comments and related post"
)
@ApiResponse(
responseCode = "200",
description = "Comment context",
content = @Content(schema = @Schema(implementation = CommentContextDto.class))
)
public ResponseEntity<CommentContextDto> getCommentContext(@PathVariable Long commentId) {
log.debug("getCommentContext called for comment {}", commentId);
Comment comment = commentService.getComment(commentId);
CommentContextDto dto = new CommentContextDto();
dto.setPost(postMapper.toSummaryDto(comment.getPost()));
dto.setTargetComment(commentMapper.toDtoWithReplies(comment));
dto.setPreviousComments(
commentService
.getCommentsBefore(comment)
.stream()
.map(commentMapper::toDtoWithReplies)
.collect(Collectors.toList())
);
log.debug(
"getCommentContext returning {} previous comments for comment {}",
dto.getPreviousComments().size(),
commentId
);
return ResponseEntity.ok(dto);
}
@DeleteMapping("/comments/{id}")
@Operation(summary = "Delete comment", description = "Delete a comment")
@ApiResponse(responseCode = "200", description = "Deleted")

View File

@@ -224,26 +224,6 @@ public class PostController {
.collect(Collectors.toList());
}
@GetMapping("/recent")
@Operation(
summary = "Recent posts",
description = "List posts created within the specified number of minutes"
)
@ApiResponse(
responseCode = "200",
description = "Recent posts",
content = @Content(
array = @ArraySchema(schema = @Schema(implementation = PostSummaryDto.class))
)
)
public List<PostSummaryDto> recentPosts(@RequestParam("minutes") int minutes) {
return postService
.listRecentPosts(minutes)
.stream()
.map(postMapper::toSummaryDto)
.collect(Collectors.toList());
}
@GetMapping("/ranking")
@Operation(summary = "Ranking posts", description = "List posts by view rankings")
@ApiResponse(

View File

@@ -1,15 +0,0 @@
package com.openisle.dto;
import java.util.List;
import lombok.Data;
/**
* DTO representing the context of a comment including its post and previous comments.
*/
@Data
public class CommentContextDto {
private PostSummaryDto post;
private CommentDto targetComment;
private List<CommentDto> previousComments;
}

View File

@@ -3,7 +3,6 @@ package com.openisle.repository;
import com.openisle.model.Comment;
import com.openisle.model.Post;
import com.openisle.model.User;
import java.time.LocalDateTime;
import java.util.List;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
@@ -11,10 +10,6 @@ import org.springframework.data.jpa.repository.JpaRepository;
public interface CommentRepository extends JpaRepository<Comment, Long> {
List<Comment> findByPostAndParentIsNullOrderByCreatedAtAsc(Post post);
List<Comment> findByParentOrderByCreatedAtAsc(Comment parent);
List<Comment> findByPostAndCreatedAtLessThanOrderByCreatedAtAsc(
Post post,
LocalDateTime createdAt
);
List<Comment> findByAuthorOrderByCreatedAtDesc(User author, Pageable pageable);
List<Comment> findByContentContainingIgnoreCase(String keyword);

View File

@@ -19,10 +19,6 @@ public interface PostRepository extends JpaRepository<Post, Long> {
List<Post> findByStatusOrderByCreatedAtDesc(PostStatus status, Pageable pageable);
List<Post> findByStatusOrderByViewsDesc(PostStatus status);
List<Post> findByStatusOrderByViewsDesc(PostStatus status, Pageable pageable);
List<Post> findByStatusAndCreatedAtGreaterThanEqualOrderByCreatedAtDesc(
PostStatus status,
LocalDateTime createdAt
);
List<Post> findByAuthorAndStatusOrderByCreatedAtDesc(
User author,
PostStatus status,

View File

@@ -266,27 +266,6 @@ public class CommentService {
return replies;
}
public Comment getComment(Long commentId) {
log.debug("getComment called for id {}", commentId);
return commentRepository
.findById(commentId)
.orElseThrow(() -> new com.openisle.exception.NotFoundException("Comment not found"));
}
public List<Comment> getCommentsBefore(Comment comment) {
log.debug("getCommentsBefore called for comment {}", comment.getId());
List<Comment> comments = commentRepository.findByPostAndCreatedAtLessThanOrderByCreatedAtAsc(
comment.getPost(),
comment.getCreatedAt()
);
log.debug(
"getCommentsBefore returning {} comments for comment {}",
comments.size(),
comment.getId()
);
return comments;
}
public List<Comment> getRecentCommentsByUser(String username, int limit) {
log.debug("getRecentCommentsByUser called for user {} with limit {}", username, limit);
User user = userRepository

View File

@@ -770,18 +770,6 @@ public class PostService {
return listPostsByCategories(null, null, null);
}
public List<Post> listRecentPosts(int minutes) {
if (minutes <= 0) {
throw new IllegalArgumentException("Minutes must be positive");
}
LocalDateTime since = LocalDateTime.now().minusMinutes(minutes);
List<Post> posts = postRepository.findByStatusAndCreatedAtGreaterThanEqualOrderByCreatedAtDesc(
PostStatus.PUBLISHED,
since
);
return sortByPinnedAndCreated(posts);
}
public List<Post> listPostsByViews(Integer page, Integer pageSize) {
return listPostsByViews(null, null, page, pageSize);
}

View File

@@ -40,12 +40,12 @@ echo "👉 Build images ..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=production \
frontend_service mcp
frontend_service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot frontend_service mcp
mysql redis rabbitmq websocket-service springboot frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -39,12 +39,12 @@ echo "👉 Build images (staging)..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=staging \
frontend_service mcp
frontend_service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot frontend_service mcp
mysql redis rabbitmq websocket-service springboot frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -178,32 +178,6 @@ services:
- dev
- prod
mcp:
build:
context: ..
dockerfile: docker/mcp.Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
env_file:
- ${ENV_FILE:-../.env}
environment:
OPENISLE_MCP_BACKEND_BASE_URL: http://springboot:${SERVER_PORT:-8080}
OPENISLE_MCP_HOST: 0.0.0.0
OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8085}
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-streamable-http}
OPENISLE_MCP_REQUEST_TIMEOUT: ${OPENISLE_MCP_REQUEST_TIMEOUT:-10.0}
ports:
- "${OPENISLE_MCP_PORT:-8085}:${OPENISLE_MCP_PORT:-8085}"
depends_on:
springboot:
condition: service_started
networks:
- openisle-network
profiles:
- dev
- dev_local_backend
- prod
websocket-service:
image: maven:3.9-eclipse-temurin-17
container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket
@@ -239,6 +213,32 @@ services:
- dev_local_backend
- prod
mcp-service:
build:
context: ..
dockerfile: mcp/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
env_file:
- ${ENV_FILE:-../.env}
environment:
FASTMCP_HOST: 0.0.0.0
FASTMCP_PORT: ${MCP_PORT:-8765}
OPENISLE_BACKEND_URL: ${OPENISLE_BACKEND_URL:-http://springboot:8080}
OPENISLE_BACKEND_TIMEOUT: ${OPENISLE_BACKEND_TIMEOUT:-10}
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-sse}
OPENISLE_MCP_SSE_MOUNT_PATH: ${OPENISLE_MCP_SSE_MOUNT_PATH:-/mcp}
ports:
- "${MCP_PORT:-8765}:${MCP_PORT:-8765}"
depends_on:
springboot:
condition: service_healthy
restart: unless-stopped
networks:
- openisle-network
profiles:
- dev
- prod
frontend_dev:
image: node:20
container_name: ${COMPOSE_PROJECT_NAME}-openisle-frontend-dev

View File

@@ -1,21 +0,0 @@
FROM python:3.11-slim AS base
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
WORKDIR /app
COPY mcp/pyproject.toml mcp/README.md ./
COPY mcp/src ./src
RUN pip install --no-cache-dir --upgrade pip \
&& pip install --no-cache-dir .
ENV OPENISLE_MCP_HOST=0.0.0.0 \
OPENISLE_MCP_PORT=8085 \
OPENISLE_MCP_TRANSPORT=streamable-http
EXPOSE 8085
CMD ["openisle-mcp"]

mcp/Dockerfile (new file, 17 lines)
View File

@@ -0,0 +1,17 @@
FROM python:3.11-slim AS runtime
ENV PYTHONUNBUFFERED=1 \
PIP_NO_CACHE_DIR=1
WORKDIR /app
COPY mcp/pyproject.toml /app/pyproject.toml
COPY mcp/README.md /app/README.md
COPY mcp/src /app/src
RUN pip install --upgrade pip \
&& pip install .
EXPOSE 8765
CMD ["openisle-mcp"]

View File

@@ -1,39 +1,39 @@
# OpenIsle MCP Server
This package provides a [Model Context Protocol](https://modelcontextprotocol.io) (MCP) server
that exposes OpenIsle's search capabilities as MCP tools. The initial release focuses on the
global search endpoint so the agent ecosystem can retrieve relevant posts, users, tags, and
other resources.
This package provides a [Model Context Protocol](https://github.com/modelcontextprotocol) (MCP) server that exposes the OpenIsle
search capabilities to AI assistants. The server wraps the existing Spring Boot backend and currently provides a single `search`
tool. Future iterations can extend the server with additional functionality such as publishing new posts or moderating content.
## Configuration
## Features
The server is configured through environment variables (all prefixed with `OPENISLE_MCP_`):
- 🔍 **Global search** — delegates to the existing `/api/search/global` endpoint exposed by the OpenIsle backend.
- 🧠 **Structured results** — responses include highlights and deep links so AI clients can present the results cleanly.
- ⚙️ **Configurable** — point the server at any reachable OpenIsle backend by setting environment variables.
| Variable | Default | Description |
| --- | --- | --- |
| `BACKEND_BASE_URL` | `http://springboot:8080` | Base URL of the OpenIsle backend. |
| `PORT` | `8085` | TCP port when running with the `streamable-http` transport. |
| `HOST` | `0.0.0.0` | Interface to bind when serving HTTP. |
| `TRANSPORT` | `streamable-http` | Transport to use (`stdio`, `sse`, or `streamable-http`). |
| `REQUEST_TIMEOUT` | `10.0` | Timeout (seconds) for backend HTTP requests. |
## Running locally
## Local development
```bash
pip install .
OPENISLE_MCP_BACKEND_BASE_URL="http://localhost:8080" openisle-mcp
cd mcp
python -m venv .venv
source .venv/bin/activate
pip install -e .
openisle-mcp --transport stdio # or "sse"/"streamable-http"
```
By default the server listens on port `8085` and serves MCP over Streamable HTTP.
Environment variables:
## Available tools
| Variable | Description | Default |
| --- | --- | --- |
| `OPENISLE_BACKEND_URL` | Base URL of the Spring Boot backend | `http://springboot:8080` |
| `OPENISLE_BACKEND_TIMEOUT` | Timeout (seconds) for backend HTTP calls | `10` |
| `OPENISLE_PUBLIC_BASE_URL` | Optional base URL used to build deep links in search results | *(unset)* |
| `OPENISLE_MCP_TRANSPORT` | MCP transport (`stdio`, `sse`, `streamable-http`) | `stdio` |
| `OPENISLE_MCP_SSE_MOUNT_PATH` | Mount path when using SSE transport | `/mcp` |
| `FASTMCP_HOST` | Host for SSE / HTTP transports | `127.0.0.1` |
| `FASTMCP_PORT` | Port for SSE / HTTP transports | `8000` |
| Tool | Description |
| --- | --- |
| `search` | Perform a global search against the OpenIsle backend. |
| `reply_to_comment` | Reply to an existing comment using a JWT token. |
| `recent_posts` | Retrieve posts created within the last *N* minutes. |
## Docker
The tools return structured data mirroring the backend DTOs, including highlighted snippets for
search results, the full comment payload for replies, and detailed metadata for recent posts.
A dedicated Docker image is provided and wired into `docker-compose.yaml`. The container listens on
`${MCP_PORT:-8765}` and connects to the backend service running in the same compose stack.
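
For a quick end-to-end check (an editor's sketch, not part of the commit), the `search` tool can be called through the official `mcp` Python SDK over the stdio transport. The snippet assumes `openisle-mcp` is installed on the PATH and that `OPENISLE_BACKEND_URL` is exported in the environment beforehand; the keyword and limit values are made up.

```python
# Hedged sketch: call the `search` tool of this server via the MCP Python SDK.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def main() -> None:
    # Spawn the server as a subprocess speaking MCP over stdio.
    params = StdioServerParameters(command="openisle-mcp", args=["--transport", "stdio"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            result = await session.call_tool("search", {"keyword": "openisle", "limit": 5})
            print(result)


asyncio.run(main())
```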

View File

@@ -1,27 +1,29 @@
[build-system]
requires = ["hatchling>=1.25"]
build-backend = "hatchling.build"
requires = ["setuptools>=68", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "openisle-mcp"
version = "0.1.0"
description = "Model Context Protocol server exposing OpenIsle search capabilities."
description = "Model Context Protocol server exposing OpenIsle search capabilities"
readme = "README.md"
authors = [{ name = "OpenIsle", email = "engineering@openisle.example" }]
authors = [{name = "OpenIsle Team"}]
license = {text = "MIT"}
requires-python = ">=3.11"
dependencies = [
"mcp>=1.19.0",
"httpx>=0.28,<0.29",
"pydantic>=2.12,<3",
"pydantic-settings>=2.11,<3"
"httpx>=0.28.0",
"pydantic>=2.12.0",
]
[project.scripts]
openisle-mcp = "openisle_mcp.server:main"
[tool.hatch.build]
packages = ["src/openisle_mcp"]
[tool.setuptools]
package-dir = {"" = "src"}
[tool.ruff]
line-length = 100
[tool.setuptools.packages.find]
where = ["src"]
[tool.setuptools.package-data]
openisle_mcp = ["py.typed"]

View File

@@ -1,6 +1,10 @@
"""OpenIsle MCP server package."""
from .config import Settings, get_settings
from importlib import metadata
__all__ = ["Settings", "get_settings"]
try:
__version__ = metadata.version("openisle-mcp")
except metadata.PackageNotFoundError: # pragma: no cover - best effort during dev
__version__ = "0.0.0"
__all__ = ["__version__"]

View File

@@ -0,0 +1,79 @@
"""HTTP client for talking to the OpenIsle backend."""
from __future__ import annotations
import json
import logging
from typing import List
import httpx
from pydantic import ValidationError
from .models import BackendSearchResult
__all__ = ["BackendClientError", "OpenIsleBackendClient"]
logger = logging.getLogger(__name__)
class BackendClientError(RuntimeError):
"""Raised when the backend cannot fulfil a request."""
class OpenIsleBackendClient:
"""Tiny wrapper around the Spring Boot search endpoints."""
def __init__(self, base_url: str, timeout: float = 10.0) -> None:
if not base_url:
raise ValueError("base_url must not be empty")
self._base_url = base_url.rstrip("/")
timeout = timeout if timeout > 0 else 10.0
self._timeout = httpx.Timeout(timeout, connect=timeout, read=timeout)
@property
def base_url(self) -> str:
return self._base_url
async def search_global(self, keyword: str) -> List[BackendSearchResult]:
"""Call `/api/search/global` and normalise the payload."""
url = f"{self._base_url}/api/search/global"
params = {"keyword": keyword}
headers = {"Accept": "application/json"}
logger.debug("Calling OpenIsle backend", extra={"url": url, "params": params})
try:
async with httpx.AsyncClient(timeout=self._timeout, headers=headers, follow_redirects=True) as client:
response = await client.get(url, params=params)
response.raise_for_status()
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors are rare in tests
body_preview = _truncate_body(exc.response.text)
raise BackendClientError(
f"Backend returned HTTP {exc.response.status_code}: {body_preview}"
) from exc
except httpx.RequestError as exc: # pragma: no cover - network errors are rare in tests
raise BackendClientError(f"Failed to reach backend: {exc}") from exc
try:
payload = response.json()
except json.JSONDecodeError as exc:
raise BackendClientError("Backend returned invalid JSON") from exc
if not isinstance(payload, list):
raise BackendClientError("Unexpected search payload type; expected a list")
results: list[BackendSearchResult] = []
for item in payload:
try:
results.append(BackendSearchResult.model_validate(item))
except ValidationError as exc:
raise BackendClientError(f"Invalid search result payload: {exc}") from exc
return results
def _truncate_body(body: str, limit: int = 200) -> str:
body = body.strip()
if len(body) <= limit:
return body
return f"{body[:limit]}"

View File

@@ -1,52 +0,0 @@
"""Application configuration helpers for the OpenIsle MCP server."""
from __future__ import annotations
from functools import lru_cache
from typing import Literal
from pydantic import Field
from pydantic.networks import AnyHttpUrl
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
"""Configuration for the MCP server."""
backend_base_url: AnyHttpUrl = Field(
"http://springboot:8080",
description="Base URL for the OpenIsle backend service.",
)
host: str = Field(
"0.0.0.0",
description="Host interface to bind when running with HTTP transports.",
)
port: int = Field(
8085,
ge=1,
le=65535,
description="TCP port for HTTP transports.",
)
transport: Literal["stdio", "sse", "streamable-http"] = Field(
"streamable-http",
description="MCP transport to use when running the server.",
)
request_timeout: float = Field(
10.0,
gt=0,
description="Timeout (seconds) for backend search requests.",
)
model_config = SettingsConfigDict(
env_prefix="OPENISLE_MCP_",
env_file=".env",
env_file_encoding="utf-8",
case_sensitive=False,
)
@lru_cache(maxsize=1)
def get_settings() -> Settings:
"""Return cached application settings."""
return Settings()

View File

@@ -0,0 +1,58 @@
"""Pydantic models used by the OpenIsle MCP server."""
from __future__ import annotations
from typing import Dict, Optional
from pydantic import BaseModel, ConfigDict, Field
__all__ = [
"BackendSearchResult",
"SearchResult",
"SearchResponse",
]
class BackendSearchResult(BaseModel):
"""Shape of the payload returned by the OpenIsle backend."""
type: str
id: Optional[int] = None
text: Optional[str] = None
sub_text: Optional[str] = Field(default=None, alias="subText")
extra: Optional[str] = None
post_id: Optional[int] = Field(default=None, alias="postId")
highlighted_text: Optional[str] = Field(default=None, alias="highlightedText")
highlighted_sub_text: Optional[str] = Field(default=None, alias="highlightedSubText")
highlighted_extra: Optional[str] = Field(default=None, alias="highlightedExtra")
model_config = ConfigDict(populate_by_name=True, extra="ignore")
class SearchResult(BaseModel):
"""Structured search result returned to MCP clients."""
type: str = Field(description="Entity type, e.g. post, comment, user")
id: Optional[int] = Field(default=None, description="Primary identifier for the entity")
title: Optional[str] = Field(default=None, description="Primary text to display")
subtitle: Optional[str] = Field(default=None, description="Secondary text (e.g. author or category)")
extra: Optional[str] = Field(default=None, description="Additional descriptive snippet")
post_id: Optional[int] = Field(default=None, description="Associated post id for comment results")
url: Optional[str] = Field(default=None, description="Deep link to the resource inside OpenIsle")
highlights: Dict[str, Optional[str]] = Field(
default_factory=dict,
description="Highlighted HTML fragments keyed by field name",
)
model_config = ConfigDict(populate_by_name=True)
class SearchResponse(BaseModel):
"""Response envelope returned from the MCP search tool."""
keyword: str = Field(description="Sanitised keyword that was searched for")
total_results: int = Field(description="Total number of results returned by the backend")
limit: int = Field(description="Maximum number of results included in the response")
results: list[SearchResult] = Field(default_factory=list, description="Search results up to the requested limit")
model_config = ConfigDict(populate_by_name=True)
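
Because `BackendSearchResult` declares camelCase aliases and sets `populate_by_name=True`, raw backend payloads validate directly while the Python side keeps snake_case attributes. A small sketch (the payload values below are made up):

```python
# Hedged sketch: alias handling in BackendSearchResult; the payload is made up.
from openisle_mcp.models import BackendSearchResult

raw = {
    "type": "comment",
    "id": 7,
    "text": "Welcome to OpenIsle",
    "postId": 42,                          # camelCase, as the backend sends it
    "highlightedText": "<em>Welcome</em> to OpenIsle",
}

item = BackendSearchResult.model_validate(raw)
assert item.post_id == 42                  # exposed as snake_case in Python
assert item.highlighted_text == "<em>Welcome</em> to OpenIsle"
```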

View File

@@ -1,273 +0,0 @@
"""Pydantic models describing tool inputs and outputs."""
from __future__ import annotations
from datetime import datetime
from typing import Any, Optional
from pydantic import BaseModel, Field, ConfigDict
class SearchResultItem(BaseModel):
"""A single search result entry."""
type: str = Field(description="Entity type for the result (post, user, tag, etc.).")
id: Optional[int] = Field(default=None, description="Identifier of the matched entity.")
text: Optional[str] = Field(default=None, description="Primary text associated with the result.")
sub_text: Optional[str] = Field(
default=None,
alias="subText",
description="Secondary text, e.g. a username or excerpt.",
)
extra: Optional[str] = Field(default=None, description="Additional contextual information.")
post_id: Optional[int] = Field(
default=None,
alias="postId",
description="Associated post identifier when relevant.",
)
highlighted_text: Optional[str] = Field(
default=None,
alias="highlightedText",
description="Highlighted snippet of the primary text if available.",
)
highlighted_sub_text: Optional[str] = Field(
default=None,
alias="highlightedSubText",
description="Highlighted snippet of the secondary text if available.",
)
highlighted_extra: Optional[str] = Field(
default=None,
alias="highlightedExtra",
description="Highlighted snippet of extra information if available.",
)
model_config = ConfigDict(populate_by_name=True)
class SearchResponse(BaseModel):
"""Structured response returned by the search tool."""
keyword: str = Field(description="The keyword that was searched.")
total: int = Field(description="Total number of matches returned by the backend.")
results: list[SearchResultItem] = Field(
default_factory=list,
description="Ordered collection of search results.",
)
class AuthorInfo(BaseModel):
"""Summary of a post or comment author."""
id: Optional[int] = Field(default=None, description="Author identifier.")
username: Optional[str] = Field(default=None, description="Author username.")
avatar: Optional[str] = Field(default=None, description="URL of the author's avatar.")
display_medal: Optional[str] = Field(
default=None,
alias="displayMedal",
description="Medal displayed next to the author, when available.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")
class CategoryInfo(BaseModel):
"""Basic information about a post category."""
id: Optional[int] = Field(default=None, description="Category identifier.")
name: Optional[str] = Field(default=None, description="Category name.")
description: Optional[str] = Field(
default=None, description="Human friendly description of the category."
)
icon: Optional[str] = Field(default=None, description="Icon URL associated with the category.")
small_icon: Optional[str] = Field(
default=None,
alias="smallIcon",
description="Compact icon URL for the category.",
)
count: Optional[int] = Field(default=None, description="Number of posts within the category.")
model_config = ConfigDict(populate_by_name=True, extra="allow")
class TagInfo(BaseModel):
"""Details for a tag assigned to a post."""
id: Optional[int] = Field(default=None, description="Tag identifier.")
name: Optional[str] = Field(default=None, description="Tag name.")
description: Optional[str] = Field(default=None, description="Description of the tag.")
icon: Optional[str] = Field(default=None, description="Icon URL for the tag.")
small_icon: Optional[str] = Field(
default=None,
alias="smallIcon",
description="Compact icon URL for the tag.",
)
created_at: Optional[datetime] = Field(
default=None,
alias="createdAt",
description="When the tag was created.",
)
count: Optional[int] = Field(default=None, description="Number of posts using the tag.")
model_config = ConfigDict(populate_by_name=True, extra="allow")
class ReactionInfo(BaseModel):
"""Representation of a reaction on a post or comment."""
id: Optional[int] = Field(default=None, description="Reaction identifier.")
type: Optional[str] = Field(default=None, description="Reaction type (emoji, like, etc.).")
user: Optional[str] = Field(default=None, description="Username of the reacting user.")
post_id: Optional[int] = Field(
default=None,
alias="postId",
description="Related post identifier when applicable.",
)
comment_id: Optional[int] = Field(
default=None,
alias="commentId",
description="Related comment identifier when applicable.",
)
message_id: Optional[int] = Field(
default=None,
alias="messageId",
description="Related message identifier when applicable.",
)
reward: Optional[int] = Field(default=None, description="Reward granted for the reaction, if any.")
model_config = ConfigDict(populate_by_name=True, extra="allow")
class CommentData(BaseModel):
"""Comment information returned by the backend."""
id: Optional[int] = Field(default=None, description="Comment identifier.")
content: Optional[str] = Field(default=None, description="Markdown content of the comment.")
created_at: Optional[datetime] = Field(
default=None,
alias="createdAt",
description="Timestamp when the comment was created.",
)
pinned_at: Optional[datetime] = Field(
default=None,
alias="pinnedAt",
description="Timestamp when the comment was pinned, if applicable.",
)
author: Optional[AuthorInfo] = Field(default=None, description="Author of the comment.")
replies: list["CommentData"] = Field(
default_factory=list,
description="Nested replies associated with the comment.",
)
reactions: list[ReactionInfo] = Field(
default_factory=list,
description="Reactions applied to the comment.",
)
reward: Optional[int] = Field(default=None, description="Reward gained by posting the comment.")
point_reward: Optional[int] = Field(
default=None,
alias="pointReward",
description="Points rewarded for the comment.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")
class CommentReplyResult(BaseModel):
"""Structured response returned when replying to a comment."""
comment: CommentData = Field(description="Reply comment returned by the backend.")
class PostSummary(BaseModel):
"""Summary information for a post."""
id: Optional[int] = Field(default=None, description="Post identifier.")
title: Optional[str] = Field(default=None, description="Title of the post.")
content: Optional[str] = Field(default=None, description="Excerpt or content of the post.")
created_at: Optional[datetime] = Field(
default=None,
alias="createdAt",
description="When the post was created.",
)
author: Optional[AuthorInfo] = Field(default=None, description="Author who created the post.")
category: Optional[CategoryInfo] = Field(default=None, description="Category of the post.")
tags: list[TagInfo] = Field(default_factory=list, description="Tags assigned to the post.")
views: Optional[int] = Field(default=None, description="Total view count for the post.")
comment_count: Optional[int] = Field(
default=None,
alias="commentCount",
description="Number of comments on the post.",
)
status: Optional[str] = Field(default=None, description="Workflow status of the post.")
pinned_at: Optional[datetime] = Field(
default=None,
alias="pinnedAt",
description="When the post was pinned, if ever.",
)
last_reply_at: Optional[datetime] = Field(
default=None,
alias="lastReplyAt",
description="Timestamp of the most recent reply.",
)
reactions: list[ReactionInfo] = Field(
default_factory=list,
description="Reactions received by the post.",
)
participants: list[AuthorInfo] = Field(
default_factory=list,
description="Users participating in the discussion.",
)
subscribed: Optional[bool] = Field(
default=None,
description="Whether the current user is subscribed to the post.",
)
reward: Optional[int] = Field(default=None, description="Reward granted for the post.")
point_reward: Optional[int] = Field(
default=None,
alias="pointReward",
description="Points granted for the post.",
)
type: Optional[str] = Field(default=None, description="Type of the post.")
lottery: Optional[dict[str, Any]] = Field(
default=None, description="Lottery information for the post."
)
poll: Optional[dict[str, Any]] = Field(
default=None, description="Poll information for the post."
)
rss_excluded: Optional[bool] = Field(
default=None,
alias="rssExcluded",
description="Whether the post is excluded from RSS feeds.",
)
closed: Optional[bool] = Field(default=None, description="Whether the post is closed for replies.")
visible_scope: Optional[str] = Field(
default=None,
alias="visibleScope",
description="Visibility scope configuration for the post.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")
class RecentPostsResponse(BaseModel):
"""Structured response for the recent posts tool."""
minutes: int = Field(description="Time window, in minutes, used for the query.")
total: int = Field(description="Number of posts returned by the backend.")
posts: list[PostSummary] = Field(
default_factory=list,
description="Posts created within the requested time window.",
)
CommentData.model_rebuild()
class PostDetail(PostSummary):
"""Detailed information for a single post, including comments."""
comments: list[CommentData] = Field(
default_factory=list,
description="Comments that belong to the post.",
)
model_config = ConfigDict(populate_by_name=True, extra="allow")

View File

@@ -1,110 +0,0 @@
"""HTTP client helpers for talking to the OpenIsle backend endpoints."""
from __future__ import annotations
import json
from typing import Any
import httpx
class SearchClient:
"""Client for calling the OpenIsle HTTP APIs used by the MCP server."""
def __init__(self, base_url: str, *, timeout: float = 10.0) -> None:
self._base_url = base_url.rstrip("/")
self._timeout = timeout
self._client: httpx.AsyncClient | None = None
def _get_client(self) -> httpx.AsyncClient:
if self._client is None:
self._client = httpx.AsyncClient(base_url=self._base_url, timeout=self._timeout)
return self._client
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
"""Call the global search endpoint and return the parsed JSON payload."""
client = self._get_client()
response = await client.get(
"/api/search/global",
params={"keyword": keyword},
headers={"Accept": "application/json"},
)
response.raise_for_status()
payload = response.json()
if not isinstance(payload, list):
formatted = json.dumps(payload, ensure_ascii=False)[:200]
raise ValueError(f"Unexpected response format from search endpoint: {formatted}")
return [self._ensure_dict(entry) for entry in payload]
async def reply_to_comment(
self,
comment_id: int,
token: str,
content: str,
captcha: str | None = None,
) -> dict[str, Any]:
"""Reply to an existing comment and return the created reply."""
client = self._get_client()
headers = {
"Accept": "application/json",
"Content-Type": "application/json",
"Authorization": f"Bearer {token}",
}
payload: dict[str, Any] = {"content": content}
if captcha is not None:
stripped_captcha = captcha.strip()
if stripped_captcha:
payload["captcha"] = stripped_captcha
response = await client.post(
f"/api/comments/{comment_id}/replies",
json=payload,
headers=headers,
)
response.raise_for_status()
return self._ensure_dict(response.json())
async def recent_posts(self, minutes: int) -> list[dict[str, Any]]:
"""Return posts created within the given timeframe."""
client = self._get_client()
response = await client.get(
"/api/posts/recent",
params={"minutes": minutes},
headers={"Accept": "application/json"},
)
response.raise_for_status()
payload = response.json()
if not isinstance(payload, list):
formatted = json.dumps(payload, ensure_ascii=False)[:200]
raise ValueError(
f"Unexpected response format from recent posts endpoint: {formatted}"
)
return [self._ensure_dict(entry) for entry in payload]
async def get_post(self, post_id: int, token: str | None = None) -> dict[str, Any]:
"""Retrieve the detailed payload for a single post."""
client = self._get_client()
headers = {"Accept": "application/json"}
if token:
headers["Authorization"] = f"Bearer {token}"
response = await client.get(f"/api/posts/{post_id}", headers=headers)
response.raise_for_status()
return self._ensure_dict(response.json())
async def aclose(self) -> None:
"""Dispose of the underlying HTTP client."""
if self._client is not None:
await self._client.aclose()
self._client = None
@staticmethod
def _ensure_dict(entry: Any) -> dict[str, Any]:
if not isinstance(entry, dict):
raise ValueError(f"Expected JSON object, got: {type(entry)!r}")
return entry

View File

@@ -1,305 +1,164 @@
"""Entry point for running the OpenIsle MCP server."""
"""Entry point for the OpenIsle MCP server."""
from __future__ import annotations
from contextlib import asynccontextmanager
from typing import Annotated
import argparse
import logging
import os
from typing import Annotated, Optional
import httpx
from mcp.server.fastmcp import Context, FastMCP
from pydantic import ValidationError
from pydantic import Field as PydanticField
from mcp.server.fastmcp import exceptions as mcp_exceptions
from pydantic import Field
from .config import get_settings
from .schemas import (
CommentData,
CommentReplyResult,
PostDetail,
PostSummary,
RecentPostsResponse,
SearchResponse,
SearchResultItem,
)
from .search_client import SearchClient
from .client import BackendClientError, OpenIsleBackendClient
from .models import BackendSearchResult, SearchResponse, SearchResult
settings = get_settings()
search_client = SearchClient(
str(settings.backend_base_url), timeout=settings.request_timeout
)
logger = logging.getLogger(__name__)
APP_NAME = "openisle-mcp"
DEFAULT_BACKEND_URL = "http://springboot:8080"
DEFAULT_TRANSPORT = "stdio"
DEFAULT_TIMEOUT = 10.0
DEFAULT_LIMIT = 20
MAX_LIMIT = 50
@asynccontextmanager
async def lifespan(_: FastMCP):
"""Lifecycle hook that disposes shared resources when the server stops."""
try:
yield
finally:
await search_client.aclose()
app = FastMCP(
name="openisle-mcp",
server = FastMCP(
APP_NAME,
instructions=(
"Use this server to search OpenIsle content, reply to comments with an authentication "
"token, retrieve details for a specific post, and list posts created within a recent time "
"window."
"Use the `search` tool to query OpenIsle content. "
"Results include posts, comments, users, categories, and tags."
),
host=settings.host,
port=settings.port,
lifespan=lifespan,
)
@app.tool(
name="search",
description="Perform a global search across OpenIsle resources.",
structured_output=True,
def _env(name: str, default: Optional[str] = None) -> Optional[str]:
value = os.getenv(name, default)
if value is None:
return None
trimmed = value.strip()
return trimmed or default
def _load_timeout() -> float:
raw = _env("OPENISLE_BACKEND_TIMEOUT", str(DEFAULT_TIMEOUT))
try:
timeout = float(raw) if raw is not None else DEFAULT_TIMEOUT
except ValueError:
logger.warning("Invalid OPENISLE_BACKEND_TIMEOUT value '%s', falling back to %s", raw, DEFAULT_TIMEOUT)
return DEFAULT_TIMEOUT
if timeout <= 0:
logger.warning("Non-positive OPENISLE_BACKEND_TIMEOUT %s, falling back to %s", timeout, DEFAULT_TIMEOUT)
return DEFAULT_TIMEOUT
return timeout
_BACKEND_CLIENT = OpenIsleBackendClient(
base_url=_env("OPENISLE_BACKEND_URL", DEFAULT_BACKEND_URL) or DEFAULT_BACKEND_URL,
timeout=_load_timeout(),
)
async def search(
keyword: Annotated[str, PydanticField(description="Keyword to search for.")],
ctx: Context | None = None,
) -> SearchResponse:
"""Call the OpenIsle global search endpoint and return structured results."""
_PUBLIC_BASE_URL = _env("OPENISLE_PUBLIC_BASE_URL")
sanitized = keyword.strip()
if not sanitized:
raise ValueError("Keyword must not be empty.")
try:
raw_results = await search_client.global_search(sanitized)
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
message = (
"OpenIsle backend returned HTTP "
f"{exc.response.status_code} while searching for '{sanitized}'."
)
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
except httpx.RequestError as exc: # pragma: no cover - network errors
message = f"Unable to reach OpenIsle backend search service: {exc}."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
def _build_url(result: BackendSearchResult) -> Optional[str]:
if not _PUBLIC_BASE_URL:
return None
base = _PUBLIC_BASE_URL.rstrip("/")
if result.type in {"post", "post_title"} and result.id is not None:
return f"{base}/posts/{result.id}"
if result.type == "comment" and result.post_id is not None:
anchor = f"#comment-{result.id}" if result.id is not None else ""
return f"{base}/posts/{result.post_id}{anchor}"
if result.type == "user" and result.id is not None:
return f"{base}/users/{result.id}"
if result.type == "category" and result.id is not None:
return f"{base}/?categoryId={result.id}"
if result.type == "tag" and result.id is not None:
return f"{base}/?tagIds={result.id}"
return None
try:
results = [SearchResultItem.model_validate(entry) for entry in raw_results]
except ValidationError as exc:
message = "Received malformed data from the OpenIsle backend search endpoint."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
def _to_search_result(result: BackendSearchResult) -> SearchResult:
highlights = {
"text": result.highlighted_text,
"subText": result.highlighted_sub_text,
"extra": result.highlighted_extra,
}
# Remove empty highlight entries to keep the payload clean
highlights = {key: value for key, value in highlights.items() if value}
return SearchResult(
type=result.type,
id=result.id,
title=result.text,
subtitle=result.sub_text,
extra=result.extra,
post_id=result.post_id,
url=_build_url(result),
highlights=highlights,
)
KeywordParam = Annotated[str, Field(description="Keyword to search for", min_length=1)]
LimitParam = Annotated[
int,
Field(ge=1, le=MAX_LIMIT, description=f"Maximum number of results to return (<= {MAX_LIMIT})"),
]
@server.tool(name="search", description="Search OpenIsle content")
async def search(keyword: KeywordParam, limit: LimitParam = DEFAULT_LIMIT, ctx: Optional[Context] = None) -> SearchResponse:
"""Run a search query against the OpenIsle backend."""
trimmed = keyword.strip()
if not trimmed:
raise mcp_exceptions.ToolError("Keyword must not be empty")
if ctx is not None:
await ctx.info(f"Search keyword '{sanitized}' returned {len(results)} results.")
return SearchResponse(keyword=sanitized, total=len(results), results=results)
@app.tool(
name="reply_to_comment",
description="Reply to an existing comment using an authentication token.",
structured_output=True,
)
async def reply_to_comment(
comment_id: Annotated[
int,
PydanticField(ge=1, description="Identifier of the comment being replied to."),
],
token: Annotated[str, PydanticField(description="JWT bearer token for the user performing the reply.")],
content: Annotated[
str,
PydanticField(description="Markdown content of the reply."),
],
captcha: Annotated[
str | None,
PydanticField(
default=None,
description="Optional captcha solution if the backend requires it.",
),
] = None,
ctx: Context | None = None,
) -> CommentReplyResult:
"""Create a reply for a comment and return the backend payload."""
sanitized_content = content.strip()
if not sanitized_content:
raise ValueError("Reply content must not be empty.")
sanitized_token = token.strip()
if not sanitized_token:
raise ValueError("Authentication token must not be empty.")
sanitized_captcha = captcha.strip() if isinstance(captcha, str) else None
await ctx.debug(f"Searching OpenIsle for '{trimmed}' (limit={limit})")
try:
raw_comment = await search_client.reply_to_comment(
comment_id,
sanitized_token,
sanitized_content,
sanitized_captcha,
)
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
status_code = exc.response.status_code
if status_code == 401:
message = (
"Authentication failed while replying to comment "
f"{comment_id}. Please verify the token."
)
elif status_code == 403:
message = (
"The provided token is not authorized to reply to comment "
f"{comment_id}."
)
else:
message = (
"OpenIsle backend returned HTTP "
f"{status_code} while replying to comment {comment_id}."
)
raw_results = await _BACKEND_CLIENT.search_global(trimmed)
except BackendClientError as exc:
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
except httpx.RequestError as exc: # pragma: no cover - network errors
message = (
"Unable to reach OpenIsle backend comment service: "
f"{exc}."
)
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
await ctx.error(f"Search request failed: {exc}")
raise mcp_exceptions.ToolError(f"Search failed: {exc}") from exc
try:
comment = CommentData.model_validate(raw_comment)
except ValidationError as exc:
message = "Received malformed data from the reply comment endpoint."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
results = [_to_search_result(result) for result in raw_results]
limited = results[:limit]
if ctx is not None:
await ctx.info(
"Reply created successfully for comment "
f"{comment_id}."
"Search completed",
keyword=trimmed,
total_results=len(results),
returned=len(limited),
)
return CommentReplyResult(comment=comment)
@app.tool(
name="recent_posts",
description="Retrieve posts created in the last N minutes.",
structured_output=True,
)
async def recent_posts(
minutes: Annotated[
int,
PydanticField(gt=0, le=1440, description="Time window in minutes to search for new posts."),
],
ctx: Context | None = None,
) -> RecentPostsResponse:
"""Fetch recent posts from the backend and return structured data."""
try:
raw_posts = await search_client.recent_posts(minutes)
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
message = (
"OpenIsle backend returned HTTP "
f"{exc.response.status_code} while fetching recent posts for the last {minutes} minutes."
)
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
except httpx.RequestError as exc: # pragma: no cover - network errors
message = f"Unable to reach OpenIsle backend recent posts service: {exc}."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
try:
posts = [PostSummary.model_validate(entry) for entry in raw_posts]
except ValidationError as exc:
message = "Received malformed data from the recent posts endpoint."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
if ctx is not None:
await ctx.info(
f"Found {len(posts)} posts created within the last {minutes} minutes."
)
return RecentPostsResponse(minutes=minutes, total=len(posts), posts=posts)
@app.tool(
name="get_post",
description="Retrieve detailed information for a single post.",
structured_output=True,
)
async def get_post(
post_id: Annotated[
int,
PydanticField(ge=1, description="Identifier of the post to retrieve."),
],
token: Annotated[
str | None,
PydanticField(
default=None,
description="Optional JWT bearer token to view the post as an authenticated user.",
),
] = None,
ctx: Context | None = None,
) -> PostDetail:
"""Fetch post details from the backend and validate the response."""
sanitized_token = token.strip() if isinstance(token, str) else None
if sanitized_token == "":
sanitized_token = None
try:
raw_post = await search_client.get_post(post_id, sanitized_token)
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
status_code = exc.response.status_code
if status_code == 404:
message = f"Post {post_id} was not found."
elif status_code == 401:
message = "Authentication failed while retrieving the post."
elif status_code == 403:
message = "The provided token is not authorized to view this post."
else:
message = (
"OpenIsle backend returned HTTP "
f"{status_code} while retrieving post {post_id}."
)
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
except httpx.RequestError as exc: # pragma: no cover - network errors
message = f"Unable to reach OpenIsle backend post service: {exc}."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
try:
post = PostDetail.model_validate(raw_post)
except ValidationError as exc:
message = "Received malformed data from the post detail endpoint."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
if ctx is not None:
await ctx.info(f"Retrieved post {post_id} successfully.")
return post
return SearchResponse(keyword=trimmed, total_results=len(results), limit=limit, results=limited)
def main() -> None:
"""Run the MCP server using the configured transport."""
parser = argparse.ArgumentParser(description="Run the OpenIsle MCP server")
parser.add_argument(
"--transport",
choices=["stdio", "sse", "streamable-http"],
default=_env("OPENISLE_MCP_TRANSPORT", DEFAULT_TRANSPORT),
help="Transport protocol to use",
)
parser.add_argument(
"--mount-path",
default=_env("OPENISLE_MCP_SSE_MOUNT_PATH", "/mcp"),
help="Mount path when using the SSE transport",
)
args = parser.parse_args()
app.run(transport=settings.transport)
logging.basicConfig(level=os.getenv("OPENISLE_MCP_LOG_LEVEL", "INFO"))
logger.info(
"Starting OpenIsle MCP server", extra={"transport": args.transport, "backend": _BACKEND_CLIENT.base_url}
)
server.run(transport=args.transport, mount_path=args.mount_path)
if __name__ == "__main__": # pragma: no cover - manual execution
if __name__ == "__main__":
main()
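
Besides the `openisle-mcp` CLI entry point, the module-level `server` instance can be started programmatically; a minimal sketch, mirroring the call `main()` makes after parsing its arguments (`/mcp` is the documented default mount path; the SSE transport is chosen explicitly here):

```python
# Hedged sketch: start the server over SSE without going through the CLI parser.
from openisle_mcp.server import server

if __name__ == "__main__":
    # Same call main() issues; "/mcp" mirrors the OPENISLE_MCP_SSE_MOUNT_PATH default.
    server.run(transport="sse", mount_path="/mcp")
```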

View File

@@ -100,28 +100,10 @@ server {
# auth_basic_user_file /etc/nginx/.htpasswd;
}
# ---------- WEBSOCKET GATEWAY TO :8082 ----------
location ^~ /websocket/ {
proxy_pass http://127.0.0.1:8084/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
add_header Cache-Control "no-store" always;
}
location /mcp {
proxy_pass http://127.0.0.1:8085;
proxy_pass http://127.0.0.1:8084/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;

View File

@@ -8,8 +8,11 @@ server {
listen 443 ssl;
server_name staging.open-isle.com www.staging.open-isle.com;
ssl_certificate /etc/letsencrypt/live/staging.open-isle.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/staging.open-isle.com/privkey.pem;
# ssl_certificate /etc/letsencrypt/live/open-isle.com/fullchain.pem;
# ssl_certificate_key /etc/letsencrypt/live/open-isle.com/privkey.pem;
include /etc/letsencrypt/options-ssl-nginx.conf;
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
@@ -37,13 +40,59 @@ server {
add_header X-Upstream $upstream_addr always;
}
# 1) Native WebSocket
location ^~ /api/ws {
proxy_pass http://127.0.0.1:8081; # no trailing slash; keep the original URI
proxy_http_version 1.1;
# Required for the WebSocket upgrade
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
# Pass these headers through consistently (they are set on /api/ and must also be set on /api/ws)
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
}
# 2) SockJS (covers /info, /iframe.html, /.../websocket, etc.)
location ^~ /api/sockjs {
proxy_pass http://127.0.0.1:8081;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
# For the same-origin iframe fallback, enable one of the two lines below (or let Spring Security handle sameOrigin)
# proxy_hide_header X-Frame-Options;
# add_header X-Frame-Options "SAMEORIGIN" always;
}
# ---------- API ----------
location /api/ {
proxy_pass http://127.0.0.1:8081/api/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
@@ -60,6 +109,7 @@ server {
proxy_cache_bypass 1;
}
# ---------- WEBSOCKET GATEWAY TO :8083 ----------
location ^~ /websocket/ {
proxy_pass http://127.0.0.1:8083/;
proxy_http_version 1.1;
@@ -80,24 +130,4 @@ server {
add_header Cache-Control "no-store" always;
}
location /mcp {
proxy_pass http://127.0.0.1:8086;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
add_header Cache-Control "no-store" always;
}
}
}