Compare commits

..

1 Commits

Author SHA1 Message Date
Tim
ab91ec2489 feat: add MCP search service 2025-10-25 22:05:25 +08:00
18 changed files with 351 additions and 347 deletions

View File

@@ -2,11 +2,15 @@
SERVER_PORT=8080
FRONTEND_PORT=3000
WEBSOCKET_PORT=8082
OPENISLE_MCP_PORT=8085
MCP_PORT=9090
MYSQL_PORT=3306
REDIS_PORT=6379
RABBITMQ_PORT=5672
RABBITMQ_MANAGEMENT_PORT=15672
MCP_HOST=0.0.0.0
MCP_BACKEND_BASE_URL=http://springboot:8080
MCP_CONNECT_TIMEOUT=5
MCP_READ_TIMEOUT=10
# === OpenSearch Configuration ===
OPENSEARCH_PORT=9200

View File

@@ -40,12 +40,12 @@ echo "👉 Build images ..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=production \
frontend_service mcp
frontend_service mcp-service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot frontend_service mcp
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -39,12 +39,12 @@ echo "👉 Build images (staging)..."
docker compose -f "$compose_file" --env-file "$env_file" \
build --pull \
--build-arg NUXT_ENV=staging \
frontend_service mcp
frontend_service mcp-service
echo "👉 Recreate & start all target services (no dev profile)..."
docker compose -f "$compose_file" --env-file "$env_file" \
up -d --force-recreate --remove-orphans --no-deps \
mysql redis rabbitmq websocket-service springboot frontend_service mcp
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
echo "👉 Current status:"
docker compose -f "$compose_file" --env-file "$env_file" ps

View File

@@ -178,32 +178,38 @@ services:
- dev
- prod
mcp:
mcp-service:
build:
context: ..
dockerfile: docker/mcp.Dockerfile
dockerfile: mcp/Dockerfile
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
env_file:
- ${ENV_FILE:-../.env}
environment:
OPENISLE_MCP_BACKEND_BASE_URL: http://springboot:${SERVER_PORT:-8080}
OPENISLE_MCP_HOST: 0.0.0.0
OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8085}
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-streamable-http}
OPENISLE_MCP_REQUEST_TIMEOUT: ${OPENISLE_MCP_REQUEST_TIMEOUT:-10.0}
MCP_HOST: ${MCP_HOST:-0.0.0.0}
MCP_PORT: ${MCP_PORT:-9090}
MCP_BACKEND_BASE_URL: ${MCP_BACKEND_BASE_URL:-http://springboot:8080}
MCP_CONNECT_TIMEOUT: ${MCP_CONNECT_TIMEOUT:-5}
MCP_READ_TIMEOUT: ${MCP_READ_TIMEOUT:-10}
ports:
- "${OPENISLE_MCP_PORT:-8085}:${OPENISLE_MCP_PORT:-8085}"
- "${MCP_PORT:-9090}:${MCP_PORT:-9090}"
depends_on:
springboot:
condition: service_started
condition: service_healthy
command: ["openisle-mcp"]
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://127.0.0.1:${MCP_PORT:-9090}/healthz || exit 1"]
interval: 10s
timeout: 5s
retries: 30
start_period: 20s
restart: unless-stopped
networks:
- openisle-network
profiles:
- dev
- dev_local_backend
- prod
websocket-service:
image: maven:3.9-eclipse-temurin-17
container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket

View File

@@ -1,21 +0,0 @@
FROM python:3.11-slim AS base
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
WORKDIR /app
COPY mcp/pyproject.toml mcp/README.md ./
COPY mcp/src ./src
RUN pip install --no-cache-dir --upgrade pip \
&& pip install --no-cache-dir .
ENV OPENISLE_MCP_HOST=0.0.0.0 \
OPENISLE_MCP_PORT=8085 \
OPENISLE_MCP_TRANSPORT=streamable-http
EXPOSE 8085
CMD ["openisle-mcp"]

21
mcp/Dockerfile Normal file
View File

@@ -0,0 +1,21 @@
# Slim Python base; the MCP service is a small FastAPI app with few native deps.
FROM python:3.11-slim
# Avoid .pyc files in the image and keep log output unbuffered for docker logs.
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
WORKDIR /app
# curl is required by the compose healthcheck (GET /healthz); clean apt lists to keep the layer small.
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl \
&& rm -rf /var/lib/apt/lists/*
# Copy packaging metadata and sources (README.md is referenced by pyproject's readme field).
COPY mcp/pyproject.toml ./pyproject.toml
COPY mcp/README.md ./README.md
COPY mcp/src ./src
# Install the package itself; this also pulls fastapi/uvicorn/httpx/pydantic from pyproject.
RUN pip install --no-cache-dir --upgrade pip \
&& pip install --no-cache-dir .
# Default MCP_PORT — the service reads the actual port from the environment at runtime.
EXPOSE 9090
# Console script declared in pyproject ([project.scripts] openisle-mcp).
CMD ["openisle-mcp"]

View File

@@ -1,37 +1,34 @@
# OpenIsle MCP Server
# OpenIsle MCP Service
This package provides a [Model Context Protocol](https://modelcontextprotocol.io) (MCP) server
that exposes OpenIsle's search capabilities as MCP tools. The initial release focuses on the
global search endpoint so the agent ecosystem can retrieve relevant posts, users, tags, and
other resources.
This package hosts a lightweight Python service that exposes OpenIsle search
capabilities through a Model Context Protocol (MCP) compatible HTTP interface.
It currently forwards search requests to the main Spring Boot backend and
returns the aggregated results. The service is intentionally simple so we can
iterate quickly and extend it with additional tools (for example, post
creation) in future updates.
## Configuration
The server is configured through environment variables (all prefixed with `OPENISLE_MCP_`):
| Variable | Default | Description |
| --- | --- | --- |
| `BACKEND_BASE_URL` | `http://springboot:8080` | Base URL of the OpenIsle backend. |
| `PORT` | `8085` | TCP port when running with the `streamable-http` transport. |
| `HOST` | `0.0.0.0` | Interface to bind when serving HTTP. |
| `TRANSPORT` | `streamable-http` | Transport to use (`stdio`, `sse`, or `streamable-http`). |
| `REQUEST_TIMEOUT` | `10.0` | Timeout (seconds) for backend HTTP requests. |
## Running locally
## Local development
```bash
pip install .
OPENISLE_MCP_BACKEND_BASE_URL="http://localhost:8080" openisle-mcp
pip install -e ./mcp
openisle-mcp
```
By default the server listens on port `8085` and serves MCP over Streamable HTTP.
By default the server listens on port `9090` and expects the Spring Boot backend
at `http://localhost:8080`. Configure the behaviour with the following
environment variables:
## Available tools
- `MCP_PORT` HTTP port the MCP service should listen on (default: `9090`).
- `MCP_HOST` Bind host for the HTTP server (default: `0.0.0.0`).
- `MCP_BACKEND_BASE_URL` Base URL of the Spring Boot backend that provides the
search endpoints (default: `http://springboot:8080`).
- `MCP_CONNECT_TIMEOUT` Connection timeout (seconds) when calling the backend
(default: `5`).
- `MCP_READ_TIMEOUT` Read timeout (seconds) when calling the backend (default:
`10`).
| Tool | Description |
| --- | --- |
| `search` | Perform a global search against the OpenIsle backend. |
The tool returns structured data describing each search hit including highlighted snippets when
provided by the backend.
## Docker
The repository contains a Dockerfile that builds a slim Python image running the
service with `uvicorn`. The compose configuration wires the container into the
existing OpenIsle stack so that deployments automatically start the MCP service.

View File

@@ -1,27 +1,25 @@
[build-system]
requires = ["hatchling>=1.25"]
requires = ["hatchling>=1.21.0"]
build-backend = "hatchling.build"
[project]
name = "openisle-mcp"
version = "0.1.0"
description = "Model Context Protocol server exposing OpenIsle search capabilities."
description = "Model Context Protocol server exposing OpenIsle search capabilities"
readme = "README.md"
authors = [{ name = "OpenIsle", email = "engineering@openisle.example" }]
authors = [
{ name = "OpenIsle" }
]
requires-python = ">=3.11"
dependencies = [
"mcp>=1.19.0",
"httpx>=0.28,<0.29",
"pydantic>=2.12,<3",
"pydantic-settings>=2.11,<3"
"fastapi>=0.111.0,<1.0.0",
"uvicorn[standard]>=0.29.0,<0.31.0",
"httpx>=0.27.0,<0.28.0",
"pydantic>=2.7.0,<3.0.0"
]
[project.scripts]
openisle-mcp = "openisle_mcp.server:main"
openisle-mcp = "openisle_mcp.__main__:main"
[tool.hatch.build]
[tool.hatch.build.targets.wheel]
packages = ["src/openisle_mcp"]
[tool.ruff]
line-length = 100

View File

@@ -1,6 +1,6 @@
"""OpenIsle MCP server package."""
"""OpenIsle MCP service package."""
from .config import Settings, get_settings
from .server import create_app
__all__ = ["Settings", "get_settings"]
__all__ = ["Settings", "get_settings", "create_app"]

View File

@@ -0,0 +1,24 @@
"""Entrypoint for running the MCP service with ``python -m``."""
from __future__ import annotations
import logging
import uvicorn
from .config import get_settings
def main() -> None:
settings = get_settings()
logging.basicConfig(level=logging.INFO)
uvicorn.run(
"openisle_mcp.server:create_app",
host=settings.host,
port=settings.port,
factory=True,
)
if __name__ == "__main__": # pragma: no cover
main()

View File

@@ -0,0 +1,44 @@
"""HTTP client helpers for talking to the Spring Boot backend."""
from __future__ import annotations
import logging
from typing import Any
import httpx
from .config import Settings
LOGGER = logging.getLogger(__name__)
class SearchClient:
    """Wrapper around :class:`httpx.AsyncClient` for search operations."""

    def __init__(self, settings: Settings):
        # Writes share the read budget; connection setup has its own timeout.
        budget = httpx.Timeout(
            connect=settings.connect_timeout,
            read=settings.read_timeout,
            write=settings.read_timeout,
            pool=None,
        )
        self._client = httpx.AsyncClient(
            base_url=settings.normalized_backend_base_url,
            timeout=budget,
        )

    async def close(self) -> None:
        """Dispose of the underlying HTTP connection pool."""
        await self._client.aclose()

    async def global_search(self, keyword: str) -> list[dict[str, Any]]:
        """Forward *keyword* to the backend's global search endpoint.

        Returns the JSON list payload from the backend, or an empty list
        when the response body is not a JSON array. Raises
        ``httpx.HTTPStatusError`` on non-2xx responses.
        """
        LOGGER.debug("Performing global search for keyword '%s'", keyword)
        response = await self._client.get(
            "/api/search/global", params={"keyword": keyword}
        )
        response.raise_for_status()
        payload = response.json()
        if not isinstance(payload, list):
            LOGGER.warning("Unexpected payload type from backend: %s", type(payload))
            return []
        return payload


__all__ = ["SearchClient"]

View File

@@ -1,52 +1,71 @@
"""Application configuration helpers for the OpenIsle MCP server."""
"""Configuration helpers for the MCP service."""
from __future__ import annotations
import os
from functools import lru_cache
from typing import Literal
from typing import Any
from pydantic import Field
from pydantic.networks import AnyHttpUrl
from pydantic_settings import BaseSettings, SettingsConfigDict
from pydantic import BaseModel, ConfigDict, Field, ValidationError
class Settings(BaseSettings):
"""Configuration for the MCP server."""
class Settings(BaseModel):
"""Application settings sourced from environment variables."""
backend_base_url: AnyHttpUrl = Field(
"http://springboot:8080",
description="Base URL for the OpenIsle backend service.",
host: str = Field(default="0.0.0.0", description="Host to bind the HTTP server to")
port: int = Field(default=9090, ge=1, le=65535, description="Port exposed by the MCP server")
backend_base_url: str = Field(
default="http://springboot:8080",
description="Base URL of the Spring Boot backend that provides search endpoints",
)
host: str = Field(
"0.0.0.0",
description="Host interface to bind when running with HTTP transports.",
connect_timeout: float = Field(
default=5.0,
ge=0.0,
description="Connection timeout when communicating with the backend (seconds)",
)
port: int = Field(
8085,
ge=1,
le=65535,
description="TCP port for HTTP transports.",
)
transport: Literal["stdio", "sse", "streamable-http"] = Field(
"streamable-http",
description="MCP transport to use when running the server.",
)
request_timeout: float = Field(
10.0,
gt=0,
description="Timeout (seconds) for backend search requests.",
read_timeout: float = Field(
default=10.0,
ge=0.0,
description="Read timeout when communicating with the backend (seconds)",
)
model_config = SettingsConfigDict(
env_prefix="OPENISLE_MCP_",
env_file=".env",
env_file_encoding="utf-8",
case_sensitive=False,
)
model_config = ConfigDict(extra="ignore")
@property
def normalized_backend_base_url(self) -> str:
"""Return the backend base URL without a trailing slash."""
return self.backend_base_url.rstrip("/")
ENV_MAPPING: dict[str, str] = {
"host": "MCP_HOST",
"port": "MCP_PORT",
"backend_base_url": "MCP_BACKEND_BASE_URL",
"connect_timeout": "MCP_CONNECT_TIMEOUT",
"read_timeout": "MCP_READ_TIMEOUT",
}
def _load_environment_values() -> dict[str, Any]:
values: dict[str, Any] = {}
for field, env_name in ENV_MAPPING.items():
value = os.getenv(env_name)
if value is None:
continue
values[field] = value
return values
@lru_cache(maxsize=1)
def get_settings() -> Settings:
"""Return cached application settings."""
"""Load and validate application settings."""
return Settings()
values = _load_environment_values()
try:
return Settings(**values)
except ValidationError as exc: # pragma: no cover - defensive branch
raise RuntimeError("Invalid MCP configuration") from exc
__all__ = ["Settings", "get_settings"]

View File

@@ -0,0 +1,38 @@
"""Pydantic models shared across the MCP service."""
from __future__ import annotations
from typing import Optional
from pydantic import BaseModel, ConfigDict, Field
class SearchResult(BaseModel):
"""Representation of a single search result entry."""
model_config = ConfigDict(extra="ignore")
type: Optional[str] = Field(default=None, description="Type of the result entry")
id: Optional[int] = Field(default=None, description="Identifier of the result entry")
text: Optional[str] = Field(default=None, description="Primary text of the result entry")
subText: Optional[str] = Field(default=None, description="Secondary text associated with the result")
extra: Optional[str] = Field(default=None, description="Additional information about the result")
postId: Optional[int] = Field(default=None, description="Related post identifier, if applicable")
highlightedText: Optional[str] = Field(default=None, description="Highlighted primary text segment")
highlightedSubText: Optional[str] = Field(
default=None,
description="Highlighted secondary text segment",
)
highlightedExtra: Optional[str] = Field(
default=None,
description="Highlighted additional information",
)
class SearchResponse(BaseModel):
"""Response payload returned by the search endpoint."""
results: list[SearchResult] = Field(default_factory=list)
__all__ = ["SearchResult", "SearchResponse"]

View File

@@ -1,55 +0,0 @@
"""Pydantic models describing tool inputs and outputs."""
from __future__ import annotations
from typing import Optional
from pydantic import BaseModel, Field, ConfigDict
class SearchResultItem(BaseModel):
"""A single search result entry."""
type: str = Field(description="Entity type for the result (post, user, tag, etc.).")
id: Optional[int] = Field(default=None, description="Identifier of the matched entity.")
text: Optional[str] = Field(default=None, description="Primary text associated with the result.")
sub_text: Optional[str] = Field(
default=None,
alias="subText",
description="Secondary text, e.g. a username or excerpt.",
)
extra: Optional[str] = Field(default=None, description="Additional contextual information.")
post_id: Optional[int] = Field(
default=None,
alias="postId",
description="Associated post identifier when relevant.",
)
highlighted_text: Optional[str] = Field(
default=None,
alias="highlightedText",
description="Highlighted snippet of the primary text if available.",
)
highlighted_sub_text: Optional[str] = Field(
default=None,
alias="highlightedSubText",
description="Highlighted snippet of the secondary text if available.",
)
highlighted_extra: Optional[str] = Field(
default=None,
alias="highlightedExtra",
description="Highlighted snippet of extra information if available.",
)
model_config = ConfigDict(populate_by_name=True)
class SearchResponse(BaseModel):
"""Structured response returned by the search tool."""
keyword: str = Field(description="The keyword that was searched.")
total: int = Field(description="Total number of matches returned by the backend.")
results: list[SearchResultItem] = Field(
default_factory=list,
description="Ordered collection of search results.",
)

View File

@@ -1,51 +0,0 @@
"""HTTP client helpers for talking to the OpenIsle backend search endpoints."""
from __future__ import annotations
import json
from typing import Any
import httpx
class SearchClient:
"""Client for calling the OpenIsle search API."""
def __init__(self, base_url: str, *, timeout: float = 10.0) -> None:
self._base_url = base_url.rstrip("/")
self._timeout = timeout
self._client: httpx.AsyncClient | None = None
def _get_client(self) -> httpx.AsyncClient:
if self._client is None:
self._client = httpx.AsyncClient(base_url=self._base_url, timeout=self._timeout)
return self._client
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
"""Call the global search endpoint and return the parsed JSON payload."""
client = self._get_client()
response = await client.get(
"/api/search/global",
params={"keyword": keyword},
headers={"Accept": "application/json"},
)
response.raise_for_status()
payload = response.json()
if not isinstance(payload, list):
formatted = json.dumps(payload, ensure_ascii=False)[:200]
raise ValueError(f"Unexpected response format from search endpoint: {formatted}")
return [self._validate_entry(entry) for entry in payload]
async def aclose(self) -> None:
"""Dispose of the underlying HTTP client."""
if self._client is not None:
await self._client.aclose()
self._client = None
@staticmethod
def _validate_entry(entry: Any) -> dict[str, Any]:
if not isinstance(entry, dict):
raise ValueError(f"Search entry must be an object, got: {type(entry)!r}")
return entry

View File

@@ -1,98 +1,66 @@
"""Entry point for running the OpenIsle MCP server."""
"""FastAPI application exposing the MCP server endpoints."""
from __future__ import annotations
from contextlib import asynccontextmanager
from typing import Annotated
import logging
from fastapi import Depends, FastAPI, HTTPException, Query, Request
import httpx
from mcp.server.fastmcp import Context, FastMCP
from pydantic import ValidationError
from pydantic import Field as PydanticField
from .client import SearchClient
from .config import get_settings
from .schemas import SearchResponse, SearchResultItem
from .search_client import SearchClient
from .models import SearchResponse, SearchResult
settings = get_settings()
search_client = SearchClient(
str(settings.backend_base_url), timeout=settings.request_timeout
)
LOGGER = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(_: FastMCP):
"""Lifecycle hook that disposes shared resources when the server stops."""
async def _lifespan(app: FastAPI):
settings = get_settings()
client = SearchClient(settings)
app.state.settings = settings
app.state.search_client = client
LOGGER.info(
"Starting MCP server on %s:%s targeting backend %s",
settings.host,
settings.port,
settings.normalized_backend_base_url,
)
try:
yield
finally:
await search_client.aclose()
LOGGER.info("Shutting down MCP server")
await client.close()
app = FastMCP(
name="openisle-mcp",
instructions=(
"Use this server to search OpenIsle posts, users, tags, categories, and comments "
"via the global search endpoint."
),
host=settings.host,
port=settings.port,
lifespan=lifespan,
)
def create_app() -> FastAPI:
"""Create and configure the FastAPI application."""
app = FastAPI(title="OpenIsle MCP Server", lifespan=_lifespan)
@app.get("/healthz", tags=["health"])
async def healthcheck() -> dict[str, str]:
return {"status": "ok"}
async def get_client(request: Request) -> SearchClient:
return request.app.state.search_client
@app.get("/search", response_model=SearchResponse, tags=["search"])
async def search(
keyword: str = Query(..., min_length=1, description="Keyword to search for"),
client: SearchClient = Depends(get_client),
) -> SearchResponse:
try:
raw_results = await client.global_search(keyword)
except httpx.HTTPStatusError as exc:
LOGGER.warning("Backend responded with error %s", exc.response.status_code)
raise HTTPException(status_code=exc.response.status_code, detail="Backend error") from exc
except httpx.HTTPError as exc:
LOGGER.error("Failed to reach backend: %s", exc)
raise HTTPException(status_code=503, detail="Search service unavailable") from exc
results = [SearchResult.model_validate(item) for item in raw_results]
return SearchResponse(results=results)
return app
@app.tool(
name="search",
description="Perform a global search across OpenIsle resources.",
structured_output=True,
)
async def search(
keyword: Annotated[str, PydanticField(description="Keyword to search for.")],
ctx: Context | None = None,
) -> SearchResponse:
"""Call the OpenIsle global search endpoint and return structured results."""
sanitized = keyword.strip()
if not sanitized:
raise ValueError("Keyword must not be empty.")
try:
raw_results = await search_client.global_search(sanitized)
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
message = (
"OpenIsle backend returned HTTP "
f"{exc.response.status_code} while searching for '{sanitized}'."
)
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
except httpx.RequestError as exc: # pragma: no cover - network errors
message = f"Unable to reach OpenIsle backend search service: {exc}."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
try:
results = [SearchResultItem.model_validate(entry) for entry in raw_results]
except ValidationError as exc:
message = "Received malformed data from the OpenIsle backend search endpoint."
if ctx is not None:
await ctx.error(message)
raise ValueError(message) from exc
if ctx is not None:
await ctx.info(f"Search keyword '{sanitized}' returned {len(results)} results.")
return SearchResponse(keyword=sanitized, total=len(results), results=results)
def main() -> None:
"""Run the MCP server using the configured transport."""
app.run(transport=settings.transport)
if __name__ == "__main__": # pragma: no cover - manual execution
main()
__all__ = ["create_app"]

View File

@@ -100,28 +100,10 @@ server {
# auth_basic_user_file /etc/nginx/.htpasswd;
}
# ---------- WEBSOCKET GATEWAY TO :8082 ----------
location ^~ /websocket/ {
proxy_pass http://127.0.0.1:8084/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
add_header Cache-Control "no-store" always;
}
location /mcp {
proxy_pass http://127.0.0.1:8085;
proxy_pass http://127.0.0.1:8084/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;

View File

@@ -8,8 +8,11 @@ server {
listen 443 ssl;
server_name staging.open-isle.com www.staging.open-isle.com;
ssl_certificate /etc/letsencrypt/live/staging.open-isle.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/staging.open-isle.com/privkey.pem;
# ssl_certificate /etc/letsencrypt/live/open-isle.com/fullchain.pem;
# ssl_certificate_key /etc/letsencrypt/live/open-isle.com/privkey.pem;
include /etc/letsencrypt/options-ssl-nginx.conf;
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
@@ -37,13 +40,59 @@ server {
add_header X-Upstream $upstream_addr always;
}
# 1) 原生 WebSocket
location ^~ /api/ws {
proxy_pass http://127.0.0.1:8081; # 不要尾随 /,保留原样 URI
proxy_http_version 1.1;
# 升级所需
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
# 统一透传这些头(你在 /api/ 有,/api/ws 也要有)
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
}
# 2) SockJS(包含 /info、/iframe.html、/.../websocket 等)
location ^~ /api/sockjs {
proxy_pass http://127.0.0.1:8081;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
# 如要同源 iframe 回退,下面两行二选一(或者交给 Spring Security 的 sameOrigin)
# proxy_hide_header X-Frame-Options;
# add_header X-Frame-Options "SAMEORIGIN" always;
}
# ---------- API ----------
location /api/ {
proxy_pass http://127.0.0.1:8081/api/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
@@ -60,6 +109,7 @@ server {
proxy_cache_bypass 1;
}
# ---------- WEBSOCKET GATEWAY TO :8083 ----------
location ^~ /websocket/ {
proxy_pass http://127.0.0.1:8083/;
proxy_http_version 1.1;
@@ -80,24 +130,4 @@ server {
add_header Cache-Control "no-store" always;
}
location /mcp {
proxy_pass http://127.0.0.1:8086;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_read_timeout 300s;
proxy_send_timeout 300s;
proxy_buffering off;
proxy_cache off;
add_header Cache-Control "no-store" always;
}
}
}