mirror of
https://github.com/nagisa77/OpenIsle.git
synced 2026-02-21 22:41:05 +08:00
Compare commits
1 Commits
codex/impr
...
codex/crea
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ab91ec2489 |
@@ -2,11 +2,15 @@
|
|||||||
SERVER_PORT=8080
|
SERVER_PORT=8080
|
||||||
FRONTEND_PORT=3000
|
FRONTEND_PORT=3000
|
||||||
WEBSOCKET_PORT=8082
|
WEBSOCKET_PORT=8082
|
||||||
OPENISLE_MCP_PORT=8085
|
MCP_PORT=9090
|
||||||
MYSQL_PORT=3306
|
MYSQL_PORT=3306
|
||||||
REDIS_PORT=6379
|
REDIS_PORT=6379
|
||||||
RABBITMQ_PORT=5672
|
RABBITMQ_PORT=5672
|
||||||
RABBITMQ_MANAGEMENT_PORT=15672
|
RABBITMQ_MANAGEMENT_PORT=15672
|
||||||
|
MCP_HOST=0.0.0.0
|
||||||
|
MCP_BACKEND_BASE_URL=http://springboot:8080
|
||||||
|
MCP_CONNECT_TIMEOUT=5
|
||||||
|
MCP_READ_TIMEOUT=10
|
||||||
|
|
||||||
# === OpenSearch Configuration ===
|
# === OpenSearch Configuration ===
|
||||||
OPENSEARCH_PORT=9200
|
OPENSEARCH_PORT=9200
|
||||||
|
|||||||
@@ -1,13 +1,11 @@
|
|||||||
package com.openisle.controller;
|
package com.openisle.controller;
|
||||||
|
|
||||||
import com.openisle.dto.CommentContextDto;
|
|
||||||
import com.openisle.dto.CommentDto;
|
import com.openisle.dto.CommentDto;
|
||||||
import com.openisle.dto.CommentRequest;
|
import com.openisle.dto.CommentRequest;
|
||||||
import com.openisle.dto.PostChangeLogDto;
|
import com.openisle.dto.PostChangeLogDto;
|
||||||
import com.openisle.dto.TimelineItemDto;
|
import com.openisle.dto.TimelineItemDto;
|
||||||
import com.openisle.mapper.CommentMapper;
|
import com.openisle.mapper.CommentMapper;
|
||||||
import com.openisle.mapper.PostChangeLogMapper;
|
import com.openisle.mapper.PostChangeLogMapper;
|
||||||
import com.openisle.mapper.PostMapper;
|
|
||||||
import com.openisle.model.Comment;
|
import com.openisle.model.Comment;
|
||||||
import com.openisle.model.CommentSort;
|
import com.openisle.model.CommentSort;
|
||||||
import com.openisle.service.*;
|
import com.openisle.service.*;
|
||||||
@@ -42,7 +40,6 @@ public class CommentController {
|
|||||||
private final PointService pointService;
|
private final PointService pointService;
|
||||||
private final PostChangeLogService changeLogService;
|
private final PostChangeLogService changeLogService;
|
||||||
private final PostChangeLogMapper postChangeLogMapper;
|
private final PostChangeLogMapper postChangeLogMapper;
|
||||||
private final PostMapper postMapper;
|
|
||||||
|
|
||||||
@Value("${app.captcha.enabled:false}")
|
@Value("${app.captcha.enabled:false}")
|
||||||
private boolean captchaEnabled;
|
private boolean captchaEnabled;
|
||||||
@@ -187,37 +184,6 @@ public class CommentController {
|
|||||||
return itemDtoList;
|
return itemDtoList;
|
||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping("/comments/{commentId}/context")
|
|
||||||
@Operation(
|
|
||||||
summary = "Comment context",
|
|
||||||
description = "Get a comment along with its previous comments and related post"
|
|
||||||
)
|
|
||||||
@ApiResponse(
|
|
||||||
responseCode = "200",
|
|
||||||
description = "Comment context",
|
|
||||||
content = @Content(schema = @Schema(implementation = CommentContextDto.class))
|
|
||||||
)
|
|
||||||
public ResponseEntity<CommentContextDto> getCommentContext(@PathVariable Long commentId) {
|
|
||||||
log.debug("getCommentContext called for comment {}", commentId);
|
|
||||||
Comment comment = commentService.getComment(commentId);
|
|
||||||
CommentContextDto dto = new CommentContextDto();
|
|
||||||
dto.setPost(postMapper.toSummaryDto(comment.getPost()));
|
|
||||||
dto.setTargetComment(commentMapper.toDtoWithReplies(comment));
|
|
||||||
dto.setPreviousComments(
|
|
||||||
commentService
|
|
||||||
.getCommentsBefore(comment)
|
|
||||||
.stream()
|
|
||||||
.map(commentMapper::toDtoWithReplies)
|
|
||||||
.collect(Collectors.toList())
|
|
||||||
);
|
|
||||||
log.debug(
|
|
||||||
"getCommentContext returning {} previous comments for comment {}",
|
|
||||||
dto.getPreviousComments().size(),
|
|
||||||
commentId
|
|
||||||
);
|
|
||||||
return ResponseEntity.ok(dto);
|
|
||||||
}
|
|
||||||
|
|
||||||
@DeleteMapping("/comments/{id}")
|
@DeleteMapping("/comments/{id}")
|
||||||
@Operation(summary = "Delete comment", description = "Delete a comment")
|
@Operation(summary = "Delete comment", description = "Delete a comment")
|
||||||
@ApiResponse(responseCode = "200", description = "Deleted")
|
@ApiResponse(responseCode = "200", description = "Deleted")
|
||||||
|
|||||||
@@ -224,26 +224,6 @@ public class PostController {
|
|||||||
.collect(Collectors.toList());
|
.collect(Collectors.toList());
|
||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping("/recent")
|
|
||||||
@Operation(
|
|
||||||
summary = "Recent posts",
|
|
||||||
description = "List posts created within the specified number of minutes"
|
|
||||||
)
|
|
||||||
@ApiResponse(
|
|
||||||
responseCode = "200",
|
|
||||||
description = "Recent posts",
|
|
||||||
content = @Content(
|
|
||||||
array = @ArraySchema(schema = @Schema(implementation = PostSummaryDto.class))
|
|
||||||
)
|
|
||||||
)
|
|
||||||
public List<PostSummaryDto> recentPosts(@RequestParam("minutes") int minutes) {
|
|
||||||
return postService
|
|
||||||
.listRecentPosts(minutes)
|
|
||||||
.stream()
|
|
||||||
.map(postMapper::toSummaryDto)
|
|
||||||
.collect(Collectors.toList());
|
|
||||||
}
|
|
||||||
|
|
||||||
@GetMapping("/ranking")
|
@GetMapping("/ranking")
|
||||||
@Operation(summary = "Ranking posts", description = "List posts by view rankings")
|
@Operation(summary = "Ranking posts", description = "List posts by view rankings")
|
||||||
@ApiResponse(
|
@ApiResponse(
|
||||||
|
|||||||
@@ -1,15 +0,0 @@
|
|||||||
package com.openisle.dto;
|
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
import lombok.Data;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* DTO representing the context of a comment including its post and previous comments.
|
|
||||||
*/
|
|
||||||
@Data
|
|
||||||
public class CommentContextDto {
|
|
||||||
|
|
||||||
private PostSummaryDto post;
|
|
||||||
private CommentDto targetComment;
|
|
||||||
private List<CommentDto> previousComments;
|
|
||||||
}
|
|
||||||
@@ -3,7 +3,6 @@ package com.openisle.repository;
|
|||||||
import com.openisle.model.Comment;
|
import com.openisle.model.Comment;
|
||||||
import com.openisle.model.Post;
|
import com.openisle.model.Post;
|
||||||
import com.openisle.model.User;
|
import com.openisle.model.User;
|
||||||
import java.time.LocalDateTime;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import org.springframework.data.domain.Pageable;
|
import org.springframework.data.domain.Pageable;
|
||||||
import org.springframework.data.jpa.repository.JpaRepository;
|
import org.springframework.data.jpa.repository.JpaRepository;
|
||||||
@@ -11,10 +10,6 @@ import org.springframework.data.jpa.repository.JpaRepository;
|
|||||||
public interface CommentRepository extends JpaRepository<Comment, Long> {
|
public interface CommentRepository extends JpaRepository<Comment, Long> {
|
||||||
List<Comment> findByPostAndParentIsNullOrderByCreatedAtAsc(Post post);
|
List<Comment> findByPostAndParentIsNullOrderByCreatedAtAsc(Post post);
|
||||||
List<Comment> findByParentOrderByCreatedAtAsc(Comment parent);
|
List<Comment> findByParentOrderByCreatedAtAsc(Comment parent);
|
||||||
List<Comment> findByPostAndCreatedAtLessThanOrderByCreatedAtAsc(
|
|
||||||
Post post,
|
|
||||||
LocalDateTime createdAt
|
|
||||||
);
|
|
||||||
List<Comment> findByAuthorOrderByCreatedAtDesc(User author, Pageable pageable);
|
List<Comment> findByAuthorOrderByCreatedAtDesc(User author, Pageable pageable);
|
||||||
List<Comment> findByContentContainingIgnoreCase(String keyword);
|
List<Comment> findByContentContainingIgnoreCase(String keyword);
|
||||||
|
|
||||||
|
|||||||
@@ -19,10 +19,6 @@ public interface PostRepository extends JpaRepository<Post, Long> {
|
|||||||
List<Post> findByStatusOrderByCreatedAtDesc(PostStatus status, Pageable pageable);
|
List<Post> findByStatusOrderByCreatedAtDesc(PostStatus status, Pageable pageable);
|
||||||
List<Post> findByStatusOrderByViewsDesc(PostStatus status);
|
List<Post> findByStatusOrderByViewsDesc(PostStatus status);
|
||||||
List<Post> findByStatusOrderByViewsDesc(PostStatus status, Pageable pageable);
|
List<Post> findByStatusOrderByViewsDesc(PostStatus status, Pageable pageable);
|
||||||
List<Post> findByStatusAndCreatedAtGreaterThanEqualOrderByCreatedAtDesc(
|
|
||||||
PostStatus status,
|
|
||||||
LocalDateTime createdAt
|
|
||||||
);
|
|
||||||
List<Post> findByAuthorAndStatusOrderByCreatedAtDesc(
|
List<Post> findByAuthorAndStatusOrderByCreatedAtDesc(
|
||||||
User author,
|
User author,
|
||||||
PostStatus status,
|
PostStatus status,
|
||||||
|
|||||||
@@ -266,27 +266,6 @@ public class CommentService {
|
|||||||
return replies;
|
return replies;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Comment getComment(Long commentId) {
|
|
||||||
log.debug("getComment called for id {}", commentId);
|
|
||||||
return commentRepository
|
|
||||||
.findById(commentId)
|
|
||||||
.orElseThrow(() -> new com.openisle.exception.NotFoundException("Comment not found"));
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<Comment> getCommentsBefore(Comment comment) {
|
|
||||||
log.debug("getCommentsBefore called for comment {}", comment.getId());
|
|
||||||
List<Comment> comments = commentRepository.findByPostAndCreatedAtLessThanOrderByCreatedAtAsc(
|
|
||||||
comment.getPost(),
|
|
||||||
comment.getCreatedAt()
|
|
||||||
);
|
|
||||||
log.debug(
|
|
||||||
"getCommentsBefore returning {} comments for comment {}",
|
|
||||||
comments.size(),
|
|
||||||
comment.getId()
|
|
||||||
);
|
|
||||||
return comments;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<Comment> getRecentCommentsByUser(String username, int limit) {
|
public List<Comment> getRecentCommentsByUser(String username, int limit) {
|
||||||
log.debug("getRecentCommentsByUser called for user {} with limit {}", username, limit);
|
log.debug("getRecentCommentsByUser called for user {} with limit {}", username, limit);
|
||||||
User user = userRepository
|
User user = userRepository
|
||||||
|
|||||||
@@ -770,18 +770,6 @@ public class PostService {
|
|||||||
return listPostsByCategories(null, null, null);
|
return listPostsByCategories(null, null, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<Post> listRecentPosts(int minutes) {
|
|
||||||
if (minutes <= 0) {
|
|
||||||
throw new IllegalArgumentException("Minutes must be positive");
|
|
||||||
}
|
|
||||||
LocalDateTime since = LocalDateTime.now().minusMinutes(minutes);
|
|
||||||
List<Post> posts = postRepository.findByStatusAndCreatedAtGreaterThanEqualOrderByCreatedAtDesc(
|
|
||||||
PostStatus.PUBLISHED,
|
|
||||||
since
|
|
||||||
);
|
|
||||||
return sortByPinnedAndCreated(posts);
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<Post> listPostsByViews(Integer page, Integer pageSize) {
|
public List<Post> listPostsByViews(Integer page, Integer pageSize) {
|
||||||
return listPostsByViews(null, null, page, pageSize);
|
return listPostsByViews(null, null, page, pageSize);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -40,12 +40,12 @@ echo "👉 Build images ..."
|
|||||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||||
build --pull \
|
build --pull \
|
||||||
--build-arg NUXT_ENV=production \
|
--build-arg NUXT_ENV=production \
|
||||||
frontend_service mcp
|
frontend_service mcp-service
|
||||||
|
|
||||||
echo "👉 Recreate & start all target services (no dev profile)..."
|
echo "👉 Recreate & start all target services (no dev profile)..."
|
||||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||||
up -d --force-recreate --remove-orphans --no-deps \
|
up -d --force-recreate --remove-orphans --no-deps \
|
||||||
mysql redis rabbitmq websocket-service springboot frontend_service mcp
|
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
|
||||||
|
|
||||||
echo "👉 Current status:"
|
echo "👉 Current status:"
|
||||||
docker compose -f "$compose_file" --env-file "$env_file" ps
|
docker compose -f "$compose_file" --env-file "$env_file" ps
|
||||||
|
|||||||
@@ -39,12 +39,12 @@ echo "👉 Build images (staging)..."
|
|||||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||||
build --pull \
|
build --pull \
|
||||||
--build-arg NUXT_ENV=staging \
|
--build-arg NUXT_ENV=staging \
|
||||||
frontend_service mcp
|
frontend_service mcp-service
|
||||||
|
|
||||||
echo "👉 Recreate & start all target services (no dev profile)..."
|
echo "👉 Recreate & start all target services (no dev profile)..."
|
||||||
docker compose -f "$compose_file" --env-file "$env_file" \
|
docker compose -f "$compose_file" --env-file "$env_file" \
|
||||||
up -d --force-recreate --remove-orphans --no-deps \
|
up -d --force-recreate --remove-orphans --no-deps \
|
||||||
mysql redis rabbitmq websocket-service springboot frontend_service mcp
|
mysql redis rabbitmq websocket-service springboot mcp-service frontend_service
|
||||||
|
|
||||||
echo "👉 Current status:"
|
echo "👉 Current status:"
|
||||||
docker compose -f "$compose_file" --env-file "$env_file" ps
|
docker compose -f "$compose_file" --env-file "$env_file" ps
|
||||||
|
|||||||
@@ -178,32 +178,38 @@ services:
|
|||||||
- dev
|
- dev
|
||||||
- prod
|
- prod
|
||||||
|
|
||||||
mcp:
|
mcp-service:
|
||||||
build:
|
build:
|
||||||
context: ..
|
context: ..
|
||||||
dockerfile: docker/mcp.Dockerfile
|
dockerfile: mcp/Dockerfile
|
||||||
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
|
container_name: ${COMPOSE_PROJECT_NAME}-openisle-mcp
|
||||||
env_file:
|
env_file:
|
||||||
- ${ENV_FILE:-../.env}
|
- ${ENV_FILE:-../.env}
|
||||||
environment:
|
environment:
|
||||||
OPENISLE_MCP_BACKEND_BASE_URL: http://springboot:${SERVER_PORT:-8080}
|
MCP_HOST: ${MCP_HOST:-0.0.0.0}
|
||||||
OPENISLE_MCP_HOST: 0.0.0.0
|
MCP_PORT: ${MCP_PORT:-9090}
|
||||||
OPENISLE_MCP_PORT: ${OPENISLE_MCP_PORT:-8085}
|
MCP_BACKEND_BASE_URL: ${MCP_BACKEND_BASE_URL:-http://springboot:8080}
|
||||||
OPENISLE_MCP_TRANSPORT: ${OPENISLE_MCP_TRANSPORT:-streamable-http}
|
MCP_CONNECT_TIMEOUT: ${MCP_CONNECT_TIMEOUT:-5}
|
||||||
OPENISLE_MCP_REQUEST_TIMEOUT: ${OPENISLE_MCP_REQUEST_TIMEOUT:-10.0}
|
MCP_READ_TIMEOUT: ${MCP_READ_TIMEOUT:-10}
|
||||||
ports:
|
ports:
|
||||||
- "${OPENISLE_MCP_PORT:-8085}:${OPENISLE_MCP_PORT:-8085}"
|
- "${MCP_PORT:-9090}:${MCP_PORT:-9090}"
|
||||||
depends_on:
|
depends_on:
|
||||||
springboot:
|
springboot:
|
||||||
condition: service_started
|
condition: service_healthy
|
||||||
|
command: ["openisle-mcp"]
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "curl -fsS http://127.0.0.1:${MCP_PORT:-9090}/healthz || exit 1"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 30
|
||||||
|
start_period: 20s
|
||||||
|
restart: unless-stopped
|
||||||
networks:
|
networks:
|
||||||
- openisle-network
|
- openisle-network
|
||||||
profiles:
|
profiles:
|
||||||
- dev
|
- dev
|
||||||
- dev_local_backend
|
|
||||||
- prod
|
- prod
|
||||||
|
|
||||||
|
|
||||||
websocket-service:
|
websocket-service:
|
||||||
image: maven:3.9-eclipse-temurin-17
|
image: maven:3.9-eclipse-temurin-17
|
||||||
container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket
|
container_name: ${COMPOSE_PROJECT_NAME}-openisle-websocket
|
||||||
|
|||||||
@@ -1,21 +0,0 @@
|
|||||||
FROM python:3.11-slim AS base
|
|
||||||
|
|
||||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
|
||||||
PYTHONUNBUFFERED=1
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
COPY mcp/pyproject.toml mcp/README.md ./
|
|
||||||
COPY mcp/src ./src
|
|
||||||
|
|
||||||
RUN pip install --no-cache-dir --upgrade pip \
|
|
||||||
&& pip install --no-cache-dir .
|
|
||||||
|
|
||||||
ENV OPENISLE_MCP_HOST=0.0.0.0 \
|
|
||||||
OPENISLE_MCP_PORT=8085 \
|
|
||||||
OPENISLE_MCP_TRANSPORT=streamable-http
|
|
||||||
|
|
||||||
EXPOSE 8085
|
|
||||||
|
|
||||||
CMD ["openisle-mcp"]
|
|
||||||
|
|
||||||
21
mcp/Dockerfile
Normal file
21
mcp/Dockerfile
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
FROM python:3.11-slim
|
||||||
|
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||||
|
PYTHONUNBUFFERED=1
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
RUN apt-get update \
|
||||||
|
&& apt-get install -y --no-install-recommends curl \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
COPY mcp/pyproject.toml ./pyproject.toml
|
||||||
|
COPY mcp/README.md ./README.md
|
||||||
|
COPY mcp/src ./src
|
||||||
|
|
||||||
|
RUN pip install --no-cache-dir --upgrade pip \
|
||||||
|
&& pip install --no-cache-dir .
|
||||||
|
|
||||||
|
EXPOSE 9090
|
||||||
|
|
||||||
|
CMD ["openisle-mcp"]
|
||||||
@@ -1,41 +1,34 @@
|
|||||||
# OpenIsle MCP Server
|
# OpenIsle MCP Service
|
||||||
|
|
||||||
This package provides a [Model Context Protocol](https://modelcontextprotocol.io) (MCP) server
|
This package hosts a lightweight Python service that exposes OpenIsle search
|
||||||
that exposes OpenIsle's search capabilities as MCP tools. The initial release focuses on the
|
capabilities through a Model Context Protocol (MCP) compatible HTTP interface.
|
||||||
global search endpoint so the agent ecosystem can retrieve relevant posts, users, tags, and
|
It currently forwards search requests to the main Spring Boot backend and
|
||||||
other resources.
|
returns the aggregated results. The service is intentionally simple so we can
|
||||||
|
iterate quickly and extend it with additional tools (for example, post
|
||||||
|
creation) in future updates.
|
||||||
|
|
||||||
## Configuration
|
## Local development
|
||||||
|
|
||||||
The server is configured through environment variables (all prefixed with `OPENISLE_MCP_`):
|
|
||||||
|
|
||||||
| Variable | Default | Description |
|
|
||||||
| --- | --- | --- |
|
|
||||||
| `BACKEND_BASE_URL` | `http://springboot:8080` | Base URL of the OpenIsle backend. |
|
|
||||||
| `PORT` | `8085` | TCP port when running with the `streamable-http` transport. |
|
|
||||||
| `HOST` | `0.0.0.0` | Interface to bind when serving HTTP. |
|
|
||||||
| `TRANSPORT` | `streamable-http` | Transport to use (`stdio`, `sse`, or `streamable-http`). |
|
|
||||||
| `REQUEST_TIMEOUT` | `10.0` | Timeout (seconds) for backend HTTP requests. |
|
|
||||||
|
|
||||||
## Running locally
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
pip install .
|
pip install -e ./mcp
|
||||||
OPENISLE_MCP_BACKEND_BASE_URL="http://localhost:8080" openisle-mcp
|
openisle-mcp
|
||||||
```
|
```
|
||||||
|
|
||||||
By default the server listens on port `8085` and serves MCP over Streamable HTTP.
|
By default the server listens on port `9090` and expects the Spring Boot backend
|
||||||
|
at `http://localhost:8080`. Configure the behaviour with the following
|
||||||
|
environment variables:
|
||||||
|
|
||||||
## Available tools
|
- `MCP_PORT` – HTTP port the MCP service should listen on (default: `9090`).
|
||||||
|
- `MCP_HOST` – Bind host for the HTTP server (default: `0.0.0.0`).
|
||||||
|
- `MCP_BACKEND_BASE_URL` – Base URL of the Spring Boot backend that provides the
|
||||||
|
search endpoints (default: `http://springboot:8080`).
|
||||||
|
- `MCP_CONNECT_TIMEOUT` – Connection timeout (seconds) when calling the backend
|
||||||
|
(default: `5`).
|
||||||
|
- `MCP_READ_TIMEOUT` – Read timeout (seconds) when calling the backend (default:
|
||||||
|
`10`).
|
||||||
|
|
||||||
| Tool | Description |
|
## Docker
|
||||||
| --- | --- |
|
|
||||||
| `search` | Perform a global search against the OpenIsle backend. |
|
|
||||||
| `reply_to_post` | Create a new comment on a post using a JWT token. |
|
|
||||||
| `reply_to_comment` | Reply to an existing comment using a JWT token. |
|
|
||||||
| `recent_posts` | Retrieve posts created within the last *N* minutes. |
|
|
||||||
|
|
||||||
The tools return structured data mirroring the backend DTOs, including highlighted snippets for
|
|
||||||
search results, the full comment payload for post replies and comment replies, and detailed
|
|
||||||
metadata for recent posts.
|
|
||||||
|
|
||||||
|
The repository contains a Dockerfile that builds a slim Python image running the
|
||||||
|
service with `uvicorn`. The compose configuration wires the container into the
|
||||||
|
existing OpenIsle stack so that deployments automatically start the MCP service.
|
||||||
|
|||||||
@@ -1,27 +1,25 @@
|
|||||||
[build-system]
|
[build-system]
|
||||||
requires = ["hatchling>=1.25"]
|
requires = ["hatchling>=1.21.0"]
|
||||||
build-backend = "hatchling.build"
|
build-backend = "hatchling.build"
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "openisle-mcp"
|
name = "openisle-mcp"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
description = "Model Context Protocol server exposing OpenIsle search capabilities."
|
description = "Model Context Protocol server exposing OpenIsle search capabilities"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
authors = [{ name = "OpenIsle", email = "engineering@openisle.example" }]
|
authors = [
|
||||||
|
{ name = "OpenIsle" }
|
||||||
|
]
|
||||||
requires-python = ">=3.11"
|
requires-python = ">=3.11"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"mcp>=1.19.0",
|
"fastapi>=0.111.0,<1.0.0",
|
||||||
"httpx>=0.28,<0.29",
|
"uvicorn[standard]>=0.29.0,<0.31.0",
|
||||||
"pydantic>=2.12,<3",
|
"httpx>=0.27.0,<0.28.0",
|
||||||
"pydantic-settings>=2.11,<3"
|
"pydantic>=2.7.0,<3.0.0"
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.scripts]
|
[project.scripts]
|
||||||
openisle-mcp = "openisle_mcp.server:main"
|
openisle-mcp = "openisle_mcp.__main__:main"
|
||||||
|
|
||||||
[tool.hatch.build]
|
[tool.hatch.build.targets.wheel]
|
||||||
packages = ["src/openisle_mcp"]
|
packages = ["src/openisle_mcp"]
|
||||||
|
|
||||||
[tool.ruff]
|
|
||||||
line-length = 100
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
"""OpenIsle MCP server package."""
|
"""OpenIsle MCP service package."""
|
||||||
|
|
||||||
from .config import Settings, get_settings
|
from .config import Settings, get_settings
|
||||||
|
from .server import create_app
|
||||||
|
|
||||||
__all__ = ["Settings", "get_settings"]
|
__all__ = ["Settings", "get_settings", "create_app"]
|
||||||
|
|
||||||
|
|||||||
24
mcp/src/openisle_mcp/__main__.py
Normal file
24
mcp/src/openisle_mcp/__main__.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
"""Entrypoint for running the MCP service with ``python -m``."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import uvicorn
|
||||||
|
|
||||||
|
from .config import get_settings
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
settings = get_settings()
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
uvicorn.run(
|
||||||
|
"openisle_mcp.server:create_app",
|
||||||
|
host=settings.host,
|
||||||
|
port=settings.port,
|
||||||
|
factory=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__": # pragma: no cover
|
||||||
|
main()
|
||||||
44
mcp/src/openisle_mcp/client.py
Normal file
44
mcp/src/openisle_mcp/client.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
"""HTTP client helpers for talking to the Spring Boot backend."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
from .config import Settings
|
||||||
|
|
||||||
|
LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class SearchClient:
|
||||||
|
"""Wrapper around :class:`httpx.AsyncClient` for search operations."""
|
||||||
|
|
||||||
|
def __init__(self, settings: Settings):
|
||||||
|
timeout = httpx.Timeout(
|
||||||
|
connect=settings.connect_timeout,
|
||||||
|
read=settings.read_timeout,
|
||||||
|
write=settings.read_timeout,
|
||||||
|
pool=None,
|
||||||
|
)
|
||||||
|
self._client = httpx.AsyncClient(
|
||||||
|
base_url=settings.normalized_backend_base_url,
|
||||||
|
timeout=timeout,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def close(self) -> None:
|
||||||
|
await self._client.aclose()
|
||||||
|
|
||||||
|
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
|
||||||
|
LOGGER.debug("Performing global search for keyword '%s'", keyword)
|
||||||
|
response = await self._client.get("/api/search/global", params={"keyword": keyword})
|
||||||
|
response.raise_for_status()
|
||||||
|
payload = response.json()
|
||||||
|
if isinstance(payload, list):
|
||||||
|
return payload
|
||||||
|
LOGGER.warning("Unexpected payload type from backend: %s", type(payload))
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["SearchClient"]
|
||||||
@@ -1,66 +1,71 @@
|
|||||||
"""Application configuration helpers for the OpenIsle MCP server."""
|
"""Configuration helpers for the MCP service."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
from typing import Literal
|
from typing import Any
|
||||||
|
|
||||||
from pydantic import Field, SecretStr
|
from pydantic import BaseModel, ConfigDict, Field, ValidationError
|
||||||
from pydantic.networks import AnyHttpUrl
|
|
||||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
|
||||||
|
|
||||||
|
|
||||||
class Settings(BaseSettings):
|
class Settings(BaseModel):
|
||||||
"""Configuration for the MCP server."""
|
"""Application settings sourced from environment variables."""
|
||||||
|
|
||||||
backend_base_url: AnyHttpUrl = Field(
|
host: str = Field(default="0.0.0.0", description="Host to bind the HTTP server to")
|
||||||
"http://springboot:8080",
|
port: int = Field(default=9090, ge=1, le=65535, description="Port exposed by the MCP server")
|
||||||
description="Base URL for the OpenIsle backend service.",
|
backend_base_url: str = Field(
|
||||||
|
default="http://springboot:8080",
|
||||||
|
description="Base URL of the Spring Boot backend that provides search endpoints",
|
||||||
)
|
)
|
||||||
host: str = Field(
|
connect_timeout: float = Field(
|
||||||
"0.0.0.0",
|
default=5.0,
|
||||||
description="Host interface to bind when running with HTTP transports.",
|
ge=0.0,
|
||||||
|
description="Connection timeout when communicating with the backend (seconds)",
|
||||||
)
|
)
|
||||||
port: int = Field(
|
read_timeout: float = Field(
|
||||||
8085,
|
default=10.0,
|
||||||
ge=1,
|
ge=0.0,
|
||||||
le=65535,
|
description="Read timeout when communicating with the backend (seconds)",
|
||||||
description="TCP port for HTTP transports.",
|
|
||||||
)
|
|
||||||
transport: Literal["stdio", "sse", "streamable-http"] = Field(
|
|
||||||
"streamable-http",
|
|
||||||
description="MCP transport to use when running the server.",
|
|
||||||
)
|
|
||||||
request_timeout: float = Field(
|
|
||||||
10.0,
|
|
||||||
gt=0,
|
|
||||||
description="Timeout (seconds) for backend search requests.",
|
|
||||||
)
|
|
||||||
access_token: SecretStr | None = Field(
|
|
||||||
default=None,
|
|
||||||
description=(
|
|
||||||
"Optional JWT bearer token used for authenticated backend calls. "
|
|
||||||
"When set, tools that support authentication will use this token "
|
|
||||||
"automatically unless an explicit token override is provided."
|
|
||||||
),
|
|
||||||
)
|
|
||||||
log_level: str = Field(
|
|
||||||
"INFO",
|
|
||||||
description=(
|
|
||||||
"Logging level for the MCP server (e.g. DEBUG, INFO, WARNING)."
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
model_config = SettingsConfigDict(
|
model_config = ConfigDict(extra="ignore")
|
||||||
env_prefix="OPENISLE_MCP_",
|
|
||||||
env_file=".env",
|
@property
|
||||||
env_file_encoding="utf-8",
|
def normalized_backend_base_url(self) -> str:
|
||||||
case_sensitive=False,
|
"""Return the backend base URL without a trailing slash."""
|
||||||
)
|
|
||||||
|
return self.backend_base_url.rstrip("/")
|
||||||
|
|
||||||
|
|
||||||
|
ENV_MAPPING: dict[str, str] = {
|
||||||
|
"host": "MCP_HOST",
|
||||||
|
"port": "MCP_PORT",
|
||||||
|
"backend_base_url": "MCP_BACKEND_BASE_URL",
|
||||||
|
"connect_timeout": "MCP_CONNECT_TIMEOUT",
|
||||||
|
"read_timeout": "MCP_READ_TIMEOUT",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _load_environment_values() -> dict[str, Any]:
|
||||||
|
values: dict[str, Any] = {}
|
||||||
|
for field, env_name in ENV_MAPPING.items():
|
||||||
|
value = os.getenv(env_name)
|
||||||
|
if value is None:
|
||||||
|
continue
|
||||||
|
values[field] = value
|
||||||
|
return values
|
||||||
|
|
||||||
|
|
||||||
@lru_cache(maxsize=1)
|
@lru_cache(maxsize=1)
|
||||||
def get_settings() -> Settings:
|
def get_settings() -> Settings:
|
||||||
"""Return cached application settings."""
|
"""Load and validate application settings."""
|
||||||
|
|
||||||
return Settings()
|
values = _load_environment_values()
|
||||||
|
try:
|
||||||
|
return Settings(**values)
|
||||||
|
except ValidationError as exc: # pragma: no cover - defensive branch
|
||||||
|
raise RuntimeError("Invalid MCP configuration") from exc
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["Settings", "get_settings"]
|
||||||
|
|||||||
38
mcp/src/openisle_mcp/models.py
Normal file
38
mcp/src/openisle_mcp/models.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
"""Pydantic models shared across the MCP service."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, ConfigDict, Field
|
||||||
|
|
||||||
|
|
||||||
|
class SearchResult(BaseModel):
|
||||||
|
"""Representation of a single search result entry."""
|
||||||
|
|
||||||
|
model_config = ConfigDict(extra="ignore")
|
||||||
|
|
||||||
|
type: Optional[str] = Field(default=None, description="Type of the result entry")
|
||||||
|
id: Optional[int] = Field(default=None, description="Identifier of the result entry")
|
||||||
|
text: Optional[str] = Field(default=None, description="Primary text of the result entry")
|
||||||
|
subText: Optional[str] = Field(default=None, description="Secondary text associated with the result")
|
||||||
|
extra: Optional[str] = Field(default=None, description="Additional information about the result")
|
||||||
|
postId: Optional[int] = Field(default=None, description="Related post identifier, if applicable")
|
||||||
|
highlightedText: Optional[str] = Field(default=None, description="Highlighted primary text segment")
|
||||||
|
highlightedSubText: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Highlighted secondary text segment",
|
||||||
|
)
|
||||||
|
highlightedExtra: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Highlighted additional information",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class SearchResponse(BaseModel):
|
||||||
|
"""Response payload returned by the search endpoint."""
|
||||||
|
|
||||||
|
results: list[SearchResult] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["SearchResult", "SearchResponse"]
|
||||||
@@ -1,333 +0,0 @@
|
|||||||
"""Pydantic models describing tool inputs and outputs."""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Any, Optional
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field, ConfigDict
|
|
||||||
|
|
||||||
|
|
||||||
class SearchResultItem(BaseModel):
|
|
||||||
"""A single search result entry."""
|
|
||||||
|
|
||||||
type: str = Field(description="Entity type for the result (post, user, tag, etc.).")
|
|
||||||
id: Optional[int] = Field(default=None, description="Identifier of the matched entity.")
|
|
||||||
text: Optional[str] = Field(default=None, description="Primary text associated with the result.")
|
|
||||||
sub_text: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="subText",
|
|
||||||
description="Secondary text, e.g. a username or excerpt.",
|
|
||||||
)
|
|
||||||
extra: Optional[str] = Field(default=None, description="Additional contextual information.")
|
|
||||||
post_id: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="postId",
|
|
||||||
description="Associated post identifier when relevant.",
|
|
||||||
)
|
|
||||||
highlighted_text: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="highlightedText",
|
|
||||||
description="Highlighted snippet of the primary text if available.",
|
|
||||||
)
|
|
||||||
highlighted_sub_text: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="highlightedSubText",
|
|
||||||
description="Highlighted snippet of the secondary text if available.",
|
|
||||||
)
|
|
||||||
highlighted_extra: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="highlightedExtra",
|
|
||||||
description="Highlighted snippet of extra information if available.",
|
|
||||||
)
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True)
|
|
||||||
|
|
||||||
|
|
||||||
class SearchResponse(BaseModel):
|
|
||||||
"""Structured response returned by the search tool."""
|
|
||||||
|
|
||||||
keyword: str = Field(description="The keyword that was searched.")
|
|
||||||
total: int = Field(description="Total number of matches returned by the backend.")
|
|
||||||
results: list[SearchResultItem] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
description="Ordered collection of search results.",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class AuthorInfo(BaseModel):
|
|
||||||
"""Summary of a post or comment author."""
|
|
||||||
|
|
||||||
id: Optional[int] = Field(default=None, description="Author identifier.")
|
|
||||||
username: Optional[str] = Field(default=None, description="Author username.")
|
|
||||||
avatar: Optional[str] = Field(default=None, description="URL of the author's avatar.")
|
|
||||||
display_medal: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="displayMedal",
|
|
||||||
description="Medal displayed next to the author, when available.",
|
|
||||||
)
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
|
||||||
|
|
||||||
|
|
||||||
class CategoryInfo(BaseModel):
|
|
||||||
"""Basic information about a post category."""
|
|
||||||
|
|
||||||
id: Optional[int] = Field(default=None, description="Category identifier.")
|
|
||||||
name: Optional[str] = Field(default=None, description="Category name.")
|
|
||||||
description: Optional[str] = Field(
|
|
||||||
default=None, description="Human friendly description of the category."
|
|
||||||
)
|
|
||||||
icon: Optional[str] = Field(default=None, description="Icon URL associated with the category.")
|
|
||||||
small_icon: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="smallIcon",
|
|
||||||
description="Compact icon URL for the category.",
|
|
||||||
)
|
|
||||||
count: Optional[int] = Field(default=None, description="Number of posts within the category.")
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
|
||||||
|
|
||||||
|
|
||||||
class TagInfo(BaseModel):
|
|
||||||
"""Details for a tag assigned to a post."""
|
|
||||||
|
|
||||||
id: Optional[int] = Field(default=None, description="Tag identifier.")
|
|
||||||
name: Optional[str] = Field(default=None, description="Tag name.")
|
|
||||||
description: Optional[str] = Field(default=None, description="Description of the tag.")
|
|
||||||
icon: Optional[str] = Field(default=None, description="Icon URL for the tag.")
|
|
||||||
small_icon: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="smallIcon",
|
|
||||||
description="Compact icon URL for the tag.",
|
|
||||||
)
|
|
||||||
created_at: Optional[datetime] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="createdAt",
|
|
||||||
description="When the tag was created.",
|
|
||||||
)
|
|
||||||
count: Optional[int] = Field(default=None, description="Number of posts using the tag.")
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
|
||||||
|
|
||||||
|
|
||||||
class ReactionInfo(BaseModel):
|
|
||||||
"""Representation of a reaction on a post or comment."""
|
|
||||||
|
|
||||||
id: Optional[int] = Field(default=None, description="Reaction identifier.")
|
|
||||||
type: Optional[str] = Field(default=None, description="Reaction type (emoji, like, etc.).")
|
|
||||||
user: Optional[str] = Field(default=None, description="Username of the reacting user.")
|
|
||||||
post_id: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="postId",
|
|
||||||
description="Related post identifier when applicable.",
|
|
||||||
)
|
|
||||||
comment_id: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="commentId",
|
|
||||||
description="Related comment identifier when applicable.",
|
|
||||||
)
|
|
||||||
message_id: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="messageId",
|
|
||||||
description="Related message identifier when applicable.",
|
|
||||||
)
|
|
||||||
reward: Optional[int] = Field(default=None, description="Reward granted for the reaction, if any.")
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
|
||||||
|
|
||||||
|
|
||||||
class CommentData(BaseModel):
|
|
||||||
"""Comment information returned by the backend."""
|
|
||||||
|
|
||||||
id: Optional[int] = Field(default=None, description="Comment identifier.")
|
|
||||||
content: Optional[str] = Field(default=None, description="Markdown content of the comment.")
|
|
||||||
created_at: Optional[datetime] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="createdAt",
|
|
||||||
description="Timestamp when the comment was created.",
|
|
||||||
)
|
|
||||||
pinned_at: Optional[datetime] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="pinnedAt",
|
|
||||||
description="Timestamp when the comment was pinned, if applicable.",
|
|
||||||
)
|
|
||||||
author: Optional[AuthorInfo] = Field(default=None, description="Author of the comment.")
|
|
||||||
replies: list["CommentData"] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
description="Nested replies associated with the comment.",
|
|
||||||
)
|
|
||||||
reactions: list[ReactionInfo] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
description="Reactions applied to the comment.",
|
|
||||||
)
|
|
||||||
reward: Optional[int] = Field(default=None, description="Reward gained by posting the comment.")
|
|
||||||
point_reward: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="pointReward",
|
|
||||||
description="Points rewarded for the comment.",
|
|
||||||
)
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
|
||||||
|
|
||||||
|
|
||||||
class CommentReplyResult(BaseModel):
|
|
||||||
"""Structured response returned when replying to a comment."""
|
|
||||||
|
|
||||||
comment: CommentData = Field(description="Reply comment returned by the backend.")
|
|
||||||
|
|
||||||
|
|
||||||
class CommentCreateResult(BaseModel):
|
|
||||||
"""Structured response returned when creating a comment on a post."""
|
|
||||||
|
|
||||||
comment: CommentData = Field(description="Comment returned by the backend.")
|
|
||||||
|
|
||||||
|
|
||||||
class PostSummary(BaseModel):
|
|
||||||
"""Summary information for a post."""
|
|
||||||
|
|
||||||
id: Optional[int] = Field(default=None, description="Post identifier.")
|
|
||||||
title: Optional[str] = Field(default=None, description="Title of the post.")
|
|
||||||
content: Optional[str] = Field(default=None, description="Excerpt or content of the post.")
|
|
||||||
created_at: Optional[datetime] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="createdAt",
|
|
||||||
description="When the post was created.",
|
|
||||||
)
|
|
||||||
author: Optional[AuthorInfo] = Field(default=None, description="Author who created the post.")
|
|
||||||
category: Optional[CategoryInfo] = Field(default=None, description="Category of the post.")
|
|
||||||
tags: list[TagInfo] = Field(default_factory=list, description="Tags assigned to the post.")
|
|
||||||
views: Optional[int] = Field(default=None, description="Total view count for the post.")
|
|
||||||
comment_count: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="commentCount",
|
|
||||||
description="Number of comments on the post.",
|
|
||||||
)
|
|
||||||
status: Optional[str] = Field(default=None, description="Workflow status of the post.")
|
|
||||||
pinned_at: Optional[datetime] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="pinnedAt",
|
|
||||||
description="When the post was pinned, if ever.",
|
|
||||||
)
|
|
||||||
last_reply_at: Optional[datetime] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="lastReplyAt",
|
|
||||||
description="Timestamp of the most recent reply.",
|
|
||||||
)
|
|
||||||
reactions: list[ReactionInfo] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
description="Reactions received by the post.",
|
|
||||||
)
|
|
||||||
participants: list[AuthorInfo] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
description="Users participating in the discussion.",
|
|
||||||
)
|
|
||||||
subscribed: Optional[bool] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Whether the current user is subscribed to the post.",
|
|
||||||
)
|
|
||||||
reward: Optional[int] = Field(default=None, description="Reward granted for the post.")
|
|
||||||
point_reward: Optional[int] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="pointReward",
|
|
||||||
description="Points granted for the post.",
|
|
||||||
)
|
|
||||||
type: Optional[str] = Field(default=None, description="Type of the post.")
|
|
||||||
lottery: Optional[dict[str, Any]] = Field(
|
|
||||||
default=None, description="Lottery information for the post."
|
|
||||||
)
|
|
||||||
poll: Optional[dict[str, Any]] = Field(
|
|
||||||
default=None, description="Poll information for the post."
|
|
||||||
)
|
|
||||||
rss_excluded: Optional[bool] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="rssExcluded",
|
|
||||||
description="Whether the post is excluded from RSS feeds.",
|
|
||||||
)
|
|
||||||
closed: Optional[bool] = Field(default=None, description="Whether the post is closed for replies.")
|
|
||||||
visible_scope: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="visibleScope",
|
|
||||||
description="Visibility scope configuration for the post.",
|
|
||||||
)
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
|
||||||
|
|
||||||
|
|
||||||
class RecentPostsResponse(BaseModel):
|
|
||||||
"""Structured response for the recent posts tool."""
|
|
||||||
|
|
||||||
minutes: int = Field(description="Time window, in minutes, used for the query.")
|
|
||||||
total: int = Field(description="Number of posts returned by the backend.")
|
|
||||||
posts: list[PostSummary] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
description="Posts created within the requested time window.",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
CommentData.model_rebuild()
|
|
||||||
|
|
||||||
|
|
||||||
class PostDetail(PostSummary):
|
|
||||||
"""Detailed information for a single post, including comments."""
|
|
||||||
|
|
||||||
comments: list[CommentData] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
description="Comments that belong to the post.",
|
|
||||||
)
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
|
||||||
|
|
||||||
|
|
||||||
class NotificationData(BaseModel):
|
|
||||||
"""Unread notification payload returned by the backend."""
|
|
||||||
|
|
||||||
id: Optional[int] = Field(default=None, description="Notification identifier.")
|
|
||||||
type: Optional[str] = Field(default=None, description="Type of the notification.")
|
|
||||||
post: Optional[PostSummary] = Field(
|
|
||||||
default=None, description="Post associated with the notification if applicable."
|
|
||||||
)
|
|
||||||
comment: Optional[CommentData] = Field(
|
|
||||||
default=None, description="Comment referenced by the notification when available."
|
|
||||||
)
|
|
||||||
parent_comment: Optional[CommentData] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="parentComment",
|
|
||||||
description="Parent comment for nested replies, when present.",
|
|
||||||
)
|
|
||||||
from_user: Optional[AuthorInfo] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="fromUser",
|
|
||||||
description="User who triggered the notification.",
|
|
||||||
)
|
|
||||||
reaction_type: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="reactionType",
|
|
||||||
description="Reaction type for reaction-based notifications.",
|
|
||||||
)
|
|
||||||
content: Optional[str] = Field(
|
|
||||||
default=None, description="Additional content or message for the notification."
|
|
||||||
)
|
|
||||||
approved: Optional[bool] = Field(
|
|
||||||
default=None, description="Approval status for moderation notifications."
|
|
||||||
)
|
|
||||||
read: Optional[bool] = Field(default=None, description="Whether the notification is read.")
|
|
||||||
created_at: Optional[datetime] = Field(
|
|
||||||
default=None,
|
|
||||||
alias="createdAt",
|
|
||||||
description="Timestamp when the notification was created.",
|
|
||||||
)
|
|
||||||
|
|
||||||
model_config = ConfigDict(populate_by_name=True, extra="allow")
|
|
||||||
|
|
||||||
|
|
||||||
class UnreadNotificationsResponse(BaseModel):
|
|
||||||
"""Structured response for unread notification queries."""
|
|
||||||
|
|
||||||
page: int = Field(description="Requested page index for the unread notifications.")
|
|
||||||
size: int = Field(description="Requested page size for the unread notifications.")
|
|
||||||
total: int = Field(description="Number of unread notifications returned in this page.")
|
|
||||||
notifications: list[NotificationData] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
description="Unread notifications returned by the backend.",
|
|
||||||
)
|
|
||||||
@@ -1,268 +0,0 @@
|
|||||||
"""HTTP client helpers for talking to the OpenIsle backend endpoints."""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class SearchClient:
|
|
||||||
"""Client for calling the OpenIsle HTTP APIs used by the MCP server."""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
base_url: str,
|
|
||||||
*,
|
|
||||||
timeout: float = 10.0,
|
|
||||||
access_token: str | None = None,
|
|
||||||
) -> None:
|
|
||||||
self._base_url = base_url.rstrip("/")
|
|
||||||
self._timeout = timeout
|
|
||||||
self._client: httpx.AsyncClient | None = None
|
|
||||||
self._access_token = self._sanitize_token(access_token)
|
|
||||||
|
|
||||||
def _get_client(self) -> httpx.AsyncClient:
|
|
||||||
if self._client is None:
|
|
||||||
logger.debug(
|
|
||||||
"Creating httpx.AsyncClient for base URL %s with timeout %.2fs",
|
|
||||||
self._base_url,
|
|
||||||
self._timeout,
|
|
||||||
)
|
|
||||||
self._client = httpx.AsyncClient(
|
|
||||||
base_url=self._base_url,
|
|
||||||
timeout=self._timeout,
|
|
||||||
)
|
|
||||||
return self._client
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _sanitize_token(token: str | None) -> str | None:
|
|
||||||
if token is None:
|
|
||||||
return None
|
|
||||||
stripped = token.strip()
|
|
||||||
return stripped or None
|
|
||||||
|
|
||||||
def update_access_token(self, token: str | None) -> None:
|
|
||||||
"""Update the default access token used for authenticated requests."""
|
|
||||||
|
|
||||||
self._access_token = self._sanitize_token(token)
|
|
||||||
if self._access_token:
|
|
||||||
logger.debug("Configured default access token for SearchClient requests.")
|
|
||||||
else:
|
|
||||||
logger.debug("Cleared default access token for SearchClient requests.")
|
|
||||||
|
|
||||||
def _resolve_token(self, token: str | None) -> str | None:
|
|
||||||
candidate = self._sanitize_token(token)
|
|
||||||
if candidate is not None:
|
|
||||||
return candidate
|
|
||||||
return self._access_token
|
|
||||||
|
|
||||||
def _require_token(self, token: str | None) -> str:
|
|
||||||
resolved = self._resolve_token(token)
|
|
||||||
if resolved is None:
|
|
||||||
raise ValueError(
|
|
||||||
"Authenticated request requires an access token but none was provided."
|
|
||||||
)
|
|
||||||
return resolved
|
|
||||||
|
|
||||||
def _build_headers(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
token: str | None = None,
|
|
||||||
accept: str = "application/json",
|
|
||||||
include_json: bool = False,
|
|
||||||
) -> dict[str, str]:
|
|
||||||
headers: dict[str, str] = {"Accept": accept}
|
|
||||||
resolved = self._resolve_token(token)
|
|
||||||
if resolved:
|
|
||||||
headers["Authorization"] = f"Bearer {resolved}"
|
|
||||||
if include_json:
|
|
||||||
headers["Content-Type"] = "application/json"
|
|
||||||
return headers
|
|
||||||
|
|
||||||
async def global_search(self, keyword: str) -> list[dict[str, Any]]:
|
|
||||||
"""Call the global search endpoint and return the parsed JSON payload."""
|
|
||||||
|
|
||||||
client = self._get_client()
|
|
||||||
logger.debug("Calling global search with keyword=%s", keyword)
|
|
||||||
response = await client.get(
|
|
||||||
"/api/search/global",
|
|
||||||
params={"keyword": keyword},
|
|
||||||
headers=self._build_headers(),
|
|
||||||
)
|
|
||||||
response.raise_for_status()
|
|
||||||
payload = response.json()
|
|
||||||
if not isinstance(payload, list):
|
|
||||||
formatted = json.dumps(payload, ensure_ascii=False)[:200]
|
|
||||||
raise ValueError(f"Unexpected response format from search endpoint: {formatted}")
|
|
||||||
logger.info(
|
|
||||||
"Global search returned %d results for keyword '%s'",
|
|
||||||
len(payload),
|
|
||||||
keyword,
|
|
||||||
)
|
|
||||||
return [self._ensure_dict(entry) for entry in payload]
|
|
||||||
|
|
||||||
async def reply_to_comment(
|
|
||||||
self,
|
|
||||||
comment_id: int,
|
|
||||||
token: str,
|
|
||||||
content: str,
|
|
||||||
captcha: str | None = None,
|
|
||||||
) -> dict[str, Any]:
|
|
||||||
"""Reply to an existing comment and return the created reply."""
|
|
||||||
|
|
||||||
client = self._get_client()
|
|
||||||
resolved_token = self._require_token(token)
|
|
||||||
headers = self._build_headers(token=resolved_token, include_json=True)
|
|
||||||
payload: dict[str, Any] = {"content": content}
|
|
||||||
if captcha is not None:
|
|
||||||
stripped_captcha = captcha.strip()
|
|
||||||
if stripped_captcha:
|
|
||||||
payload["captcha"] = stripped_captcha
|
|
||||||
|
|
||||||
logger.debug(
|
|
||||||
"Posting reply to comment_id=%s (captcha=%s)",
|
|
||||||
comment_id,
|
|
||||||
bool(captcha),
|
|
||||||
)
|
|
||||||
response = await client.post(
|
|
||||||
f"/api/comments/{comment_id}/replies",
|
|
||||||
json=payload,
|
|
||||||
headers=headers,
|
|
||||||
)
|
|
||||||
response.raise_for_status()
|
|
||||||
body = self._ensure_dict(response.json())
|
|
||||||
logger.info("Reply to comment_id=%s succeeded with id=%s", comment_id, body.get("id"))
|
|
||||||
return body
|
|
||||||
|
|
||||||
async def reply_to_post(
|
|
||||||
self,
|
|
||||||
post_id: int,
|
|
||||||
token: str,
|
|
||||||
content: str,
|
|
||||||
captcha: str | None = None,
|
|
||||||
) -> dict[str, Any]:
|
|
||||||
"""Create a comment on a post and return the backend payload."""
|
|
||||||
|
|
||||||
client = self._get_client()
|
|
||||||
resolved_token = self._require_token(token)
|
|
||||||
headers = self._build_headers(token=resolved_token, include_json=True)
|
|
||||||
payload: dict[str, Any] = {"content": content}
|
|
||||||
if captcha is not None:
|
|
||||||
stripped_captcha = captcha.strip()
|
|
||||||
if stripped_captcha:
|
|
||||||
payload["captcha"] = stripped_captcha
|
|
||||||
|
|
||||||
logger.debug(
|
|
||||||
"Posting comment to post_id=%s (captcha=%s)",
|
|
||||||
post_id,
|
|
||||||
bool(captcha),
|
|
||||||
)
|
|
||||||
response = await client.post(
|
|
||||||
f"/api/posts/{post_id}/comments",
|
|
||||||
json=payload,
|
|
||||||
headers=headers,
|
|
||||||
)
|
|
||||||
response.raise_for_status()
|
|
||||||
body = self._ensure_dict(response.json())
|
|
||||||
logger.info("Reply to post_id=%s succeeded with id=%s", post_id, body.get("id"))
|
|
||||||
return body
|
|
||||||
|
|
||||||
async def recent_posts(self, minutes: int) -> list[dict[str, Any]]:
|
|
||||||
"""Return posts created within the given timeframe."""
|
|
||||||
|
|
||||||
client = self._get_client()
|
|
||||||
logger.debug(
|
|
||||||
"Fetching recent posts within last %s minutes",
|
|
||||||
minutes,
|
|
||||||
)
|
|
||||||
response = await client.get(
|
|
||||||
"/api/posts/recent",
|
|
||||||
params={"minutes": minutes},
|
|
||||||
headers=self._build_headers(),
|
|
||||||
)
|
|
||||||
response.raise_for_status()
|
|
||||||
payload = response.json()
|
|
||||||
if not isinstance(payload, list):
|
|
||||||
formatted = json.dumps(payload, ensure_ascii=False)[:200]
|
|
||||||
raise ValueError(
|
|
||||||
f"Unexpected response format from recent posts endpoint: {formatted}"
|
|
||||||
)
|
|
||||||
logger.info(
|
|
||||||
"Fetched %d recent posts for window=%s minutes",
|
|
||||||
len(payload),
|
|
||||||
minutes,
|
|
||||||
)
|
|
||||||
return [self._ensure_dict(entry) for entry in payload]
|
|
||||||
|
|
||||||
async def get_post(self, post_id: int, token: str | None = None) -> dict[str, Any]:
|
|
||||||
"""Retrieve the detailed payload for a single post."""
|
|
||||||
|
|
||||||
client = self._get_client()
|
|
||||||
headers = self._build_headers(token=token)
|
|
||||||
logger.debug("Fetching post details for post_id=%s", post_id)
|
|
||||||
response = await client.get(f"/api/posts/{post_id}", headers=headers)
|
|
||||||
response.raise_for_status()
|
|
||||||
body = self._ensure_dict(response.json())
|
|
||||||
logger.info(
|
|
||||||
"Retrieved post_id=%s successfully with %d top-level comments",
|
|
||||||
post_id,
|
|
||||||
len(body.get("comments", []) if isinstance(body.get("comments"), list) else []),
|
|
||||||
)
|
|
||||||
return body
|
|
||||||
|
|
||||||
async def list_unread_notifications(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
page: int = 0,
|
|
||||||
size: int = 30,
|
|
||||||
token: str | None = None,
|
|
||||||
) -> list[dict[str, Any]]:
|
|
||||||
"""Return unread notifications for the authenticated user."""
|
|
||||||
|
|
||||||
client = self._get_client()
|
|
||||||
resolved_token = self._require_token(token)
|
|
||||||
logger.debug(
|
|
||||||
"Fetching unread notifications with page=%s, size=%s",
|
|
||||||
page,
|
|
||||||
size,
|
|
||||||
)
|
|
||||||
response = await client.get(
|
|
||||||
"/api/notifications/unread",
|
|
||||||
params={"page": page, "size": size},
|
|
||||||
headers=self._build_headers(token=resolved_token),
|
|
||||||
)
|
|
||||||
response.raise_for_status()
|
|
||||||
payload = response.json()
|
|
||||||
if not isinstance(payload, list):
|
|
||||||
formatted = json.dumps(payload, ensure_ascii=False)[:200]
|
|
||||||
raise ValueError(
|
|
||||||
"Unexpected response format from unread notifications endpoint: "
|
|
||||||
f"{formatted}"
|
|
||||||
)
|
|
||||||
logger.info(
|
|
||||||
"Fetched %d unread notifications (page=%s, size=%s)",
|
|
||||||
len(payload),
|
|
||||||
page,
|
|
||||||
size,
|
|
||||||
)
|
|
||||||
return [self._ensure_dict(entry) for entry in payload]
|
|
||||||
|
|
||||||
async def aclose(self) -> None:
|
|
||||||
"""Dispose of the underlying HTTP client."""
|
|
||||||
|
|
||||||
if self._client is not None:
|
|
||||||
await self._client.aclose()
|
|
||||||
self._client = None
|
|
||||||
logger.debug("Closed httpx.AsyncClient for SearchClient.")
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _ensure_dict(entry: Any) -> dict[str, Any]:
|
|
||||||
if not isinstance(entry, dict):
|
|
||||||
raise ValueError(f"Expected JSON object, got: {type(entry)!r}")
|
|
||||||
return entry
|
|
||||||
@@ -1,563 +1,66 @@
|
|||||||
"""Entry point for running the OpenIsle MCP server."""
|
"""FastAPI application exposing the MCP server endpoints."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
from typing import Annotated
|
|
||||||
|
|
||||||
|
from fastapi import Depends, FastAPI, HTTPException, Query, Request
|
||||||
import httpx
|
import httpx
|
||||||
from mcp.server.fastmcp import Context, FastMCP
|
|
||||||
from pydantic import ValidationError
|
|
||||||
from pydantic import Field as PydanticField
|
|
||||||
|
|
||||||
|
from .client import SearchClient
|
||||||
from .config import get_settings
|
from .config import get_settings
|
||||||
from .schemas import (
|
from .models import SearchResponse, SearchResult
|
||||||
CommentCreateResult,
|
|
||||||
CommentData,
|
|
||||||
CommentReplyResult,
|
|
||||||
NotificationData,
|
|
||||||
UnreadNotificationsResponse,
|
|
||||||
PostDetail,
|
|
||||||
PostSummary,
|
|
||||||
RecentPostsResponse,
|
|
||||||
SearchResponse,
|
|
||||||
SearchResultItem,
|
|
||||||
)
|
|
||||||
from .search_client import SearchClient
|
|
||||||
|
|
||||||
settings = get_settings()
|
LOGGER = logging.getLogger(__name__)
|
||||||
if not logging.getLogger().handlers:
|
|
||||||
logging.basicConfig(
|
|
||||||
level=getattr(logging, settings.log_level.upper(), logging.INFO),
|
async def _lifespan(app: FastAPI):
|
||||||
format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
|
settings = get_settings()
|
||||||
|
client = SearchClient(settings)
|
||||||
|
app.state.settings = settings
|
||||||
|
app.state.search_client = client
|
||||||
|
LOGGER.info(
|
||||||
|
"Starting MCP server on %s:%s targeting backend %s",
|
||||||
|
settings.host,
|
||||||
|
settings.port,
|
||||||
|
settings.normalized_backend_base_url,
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
logging.getLogger().setLevel(
|
|
||||||
getattr(logging, settings.log_level.upper(), logging.INFO)
|
|
||||||
)
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
search_client = SearchClient(
|
|
||||||
str(settings.backend_base_url),
|
|
||||||
timeout=settings.request_timeout,
|
|
||||||
access_token=(
|
|
||||||
settings.access_token.get_secret_value()
|
|
||||||
if settings.access_token is not None
|
|
||||||
else None
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
|
||||||
async def lifespan(_: FastMCP):
|
|
||||||
"""Lifecycle hook that disposes shared resources when the server stops."""
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
logger.debug("OpenIsle MCP server lifespan started.")
|
|
||||||
yield
|
yield
|
||||||
finally:
|
finally:
|
||||||
logger.debug("Disposing shared SearchClient instance.")
|
LOGGER.info("Shutting down MCP server")
|
||||||
await search_client.aclose()
|
await client.close()
|
||||||
|
|
||||||
|
|
||||||
app = FastMCP(
|
def create_app() -> FastAPI:
|
||||||
name="openisle-mcp",
|
"""Create and configure the FastAPI application."""
|
||||||
instructions=(
|
|
||||||
"Use this server to search OpenIsle content, reply to posts and comments with an "
|
app = FastAPI(title="OpenIsle MCP Server", lifespan=_lifespan)
|
||||||
"authentication token, retrieve details for a specific post, list posts created "
|
|
||||||
"within a recent time window, and review unread notification messages."
|
@app.get("/healthz", tags=["health"])
|
||||||
),
|
async def healthcheck() -> dict[str, str]:
|
||||||
host=settings.host,
|
return {"status": "ok"}
|
||||||
port=settings.port,
|
|
||||||
lifespan=lifespan,
|
async def get_client(request: Request) -> SearchClient:
|
||||||
)
|
return request.app.state.search_client
|
||||||
|
|
||||||
|
@app.get("/search", response_model=SearchResponse, tags=["search"])
|
||||||
|
async def search(
|
||||||
|
keyword: str = Query(..., min_length=1, description="Keyword to search for"),
|
||||||
|
client: SearchClient = Depends(get_client),
|
||||||
|
) -> SearchResponse:
|
||||||
|
try:
|
||||||
|
raw_results = await client.global_search(keyword)
|
||||||
|
except httpx.HTTPStatusError as exc:
|
||||||
|
LOGGER.warning("Backend responded with error %s", exc.response.status_code)
|
||||||
|
raise HTTPException(status_code=exc.response.status_code, detail="Backend error") from exc
|
||||||
|
except httpx.HTTPError as exc:
|
||||||
|
LOGGER.error("Failed to reach backend: %s", exc)
|
||||||
|
raise HTTPException(status_code=503, detail="Search service unavailable") from exc
|
||||||
|
results = [SearchResult.model_validate(item) for item in raw_results]
|
||||||
|
return SearchResponse(results=results)
|
||||||
|
|
||||||
|
return app
|
||||||
|
|
||||||
|
|
||||||
@app.tool(
|
__all__ = ["create_app"]
|
||||||
name="search",
|
|
||||||
description="Perform a global search across OpenIsle resources.",
|
|
||||||
structured_output=True,
|
|
||||||
)
|
|
||||||
async def search(
|
|
||||||
keyword: Annotated[str, PydanticField(description="Keyword to search for.")],
|
|
||||||
ctx: Context | None = None,
|
|
||||||
) -> SearchResponse:
|
|
||||||
"""Call the OpenIsle global search endpoint and return structured results."""
|
|
||||||
|
|
||||||
sanitized = keyword.strip()
|
|
||||||
if not sanitized:
|
|
||||||
raise ValueError("Keyword must not be empty.")
|
|
||||||
|
|
||||||
try:
|
|
||||||
logger.info("Received search request for keyword='%s'", sanitized)
|
|
||||||
raw_results = await search_client.global_search(sanitized)
|
|
||||||
except httpx.HTTPStatusError as exc: # pragma: no cover - network errors
|
|
||||||
message = (
|
|
||||||
"OpenIsle backend returned HTTP "
|
|
||||||
f"{exc.response.status_code} while searching for '{sanitized}'."
|
|
||||||
)
|
|
||||||
if ctx is not None:
|
|
||||||
await ctx.error(message)
|
|
||||||
raise ValueError(message) from exc
|
|
||||||
except httpx.RequestError as exc: # pragma: no cover - network errors
|
|
||||||
message = f"Unable to reach OpenIsle backend search service: {exc}."
|
|
||||||
if ctx is not None:
|
|
||||||
await ctx.error(message)
|
|
||||||
raise ValueError(message) from exc
|
|
||||||
|
|
||||||
try:
|
|
||||||
results = [SearchResultItem.model_validate(entry) for entry in raw_results]
|
|
||||||
except ValidationError as exc:
|
|
||||||
message = "Received malformed data from the OpenIsle backend search endpoint."
|
|
||||||
if ctx is not None:
|
|
||||||
await ctx.error(message)
|
|
||||||
raise ValueError(message) from exc
|
|
||||||
|
|
||||||
if ctx is not None:
|
|
||||||
await ctx.info(f"Search keyword '{sanitized}' returned {len(results)} results.")
|
|
||||||
logger.debug(
|
|
||||||
"Validated %d search results for keyword='%s'",
|
|
||||||
len(results),
|
|
||||||
sanitized,
|
|
||||||
)
|
|
||||||
|
|
||||||
return SearchResponse(keyword=sanitized, total=len(results), results=results)
|
|
||||||
|
|
||||||
|
|
||||||
@app.tool(
    name="reply_to_post",
    description="Create a comment on a post using an authentication token.",
    structured_output=True,
)
async def reply_to_post(
    post_id: Annotated[
        int,
        PydanticField(ge=1, description="Identifier of the post being replied to."),
    ],
    content: Annotated[
        str,
        PydanticField(description="Markdown content of the reply."),
    ],
    captcha: Annotated[
        str | None,
        PydanticField(
            default=None,
            description="Optional captcha solution if the backend requires it.",
        ),
    ] = None,
    token: Annotated[
        str | None,
        PydanticField(
            default=None,
            description=(
                "Optional JWT bearer token. When omitted the configured access token is used."
            ),
        ),
    ] = None,
    ctx: Context | None = None,
) -> CommentCreateResult:
    """Create a comment on a post and return the backend payload.

    Args:
        post_id: Identifier of the post being replied to.
        content: Markdown content of the reply; must not be blank.
        captcha: Optional captcha solution if the backend requires it.
        token: Optional JWT bearer token; a missing or blank value falls back
            to the configured access token.
        ctx: Optional MCP context used for error/info reporting.

    Returns:
        CommentCreateResult wrapping the validated backend comment payload.

    Raises:
        ValueError: If the content is empty, the backend rejects or cannot be
            reached, or the response payload fails validation.
    """

    sanitized_content = content.strip()
    if not sanitized_content:
        raise ValueError("Reply content must not be empty.")

    # Normalize blank strings to None so the HTTP client falls back to the
    # configured access token instead of sending an empty value (consistent
    # with get_post, which treats "" as "no token").
    sanitized_token = (token.strip() or None) if isinstance(token, str) else None
    sanitized_captcha = (captcha.strip() or None) if isinstance(captcha, str) else None

    try:
        logger.info(
            "Creating reply for post_id=%s (captcha=%s)",
            post_id,
            bool(sanitized_captcha),
        )
        raw_comment = await search_client.reply_to_post(
            post_id,
            sanitized_token,
            sanitized_content,
            sanitized_captcha,
        )
    except httpx.HTTPStatusError as exc:  # pragma: no cover - network errors
        # Map the common auth/not-found statuses to actionable messages;
        # everything else gets a generic HTTP-status report.
        status_code = exc.response.status_code
        if status_code == 401:
            message = (
                "Authentication failed while replying to post "
                f"{post_id}. Please verify the token."
            )
        elif status_code == 403:
            message = (
                "The provided token is not authorized to reply to post "
                f"{post_id}."
            )
        elif status_code == 404:
            message = f"Post {post_id} was not found."
        else:
            message = (
                "OpenIsle backend returned HTTP "
                f"{status_code} while replying to post {post_id}."
            )
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc
    except httpx.RequestError as exc:  # pragma: no cover - network errors
        message = (
            "Unable to reach OpenIsle backend comment service: "
            f"{exc}."
        )
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    try:
        comment = CommentData.model_validate(raw_comment)
    except ValidationError as exc:
        message = "Received malformed data from the post comment endpoint."
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    if ctx is not None:
        await ctx.info(
            "Reply created successfully for post "
            f"{post_id}."
        )
    logger.debug(
        "Validated reply comment payload for post_id=%s (comment_id=%s)",
        post_id,
        comment.id,
    )

    return CommentCreateResult(comment=comment)
|
|
||||||
|
|
||||||
|
|
||||||
@app.tool(
    name="reply_to_comment",
    description="Reply to an existing comment using an authentication token.",
    structured_output=True,
)
async def reply_to_comment(
    comment_id: Annotated[
        int,
        PydanticField(ge=1, description="Identifier of the comment being replied to."),
    ],
    content: Annotated[
        str,
        PydanticField(description="Markdown content of the reply."),
    ],
    captcha: Annotated[
        str | None,
        PydanticField(
            default=None,
            description="Optional captcha solution if the backend requires it.",
        ),
    ] = None,
    token: Annotated[
        str | None,
        PydanticField(
            default=None,
            description=(
                "Optional JWT bearer token. When omitted the configured access token is used."
            ),
        ),
    ] = None,
    ctx: Context | None = None,
) -> CommentReplyResult:
    """Create a reply for a comment and return the backend payload.

    Args:
        comment_id: Identifier of the comment being replied to.
        content: Markdown content of the reply; must not be blank.
        captcha: Optional captcha solution if the backend requires it.
        token: Optional JWT bearer token; a missing or blank value falls back
            to the configured access token.
        ctx: Optional MCP context used for error/info reporting.

    Returns:
        CommentReplyResult wrapping the validated backend comment payload.

    Raises:
        ValueError: If the content is empty, the backend rejects or cannot be
            reached, or the response payload fails validation.
    """

    sanitized_content = content.strip()
    if not sanitized_content:
        raise ValueError("Reply content must not be empty.")

    # Normalize blank strings to None so the HTTP client falls back to the
    # configured access token instead of sending an empty value (consistent
    # with get_post and reply_to_post).
    sanitized_token = (token.strip() or None) if isinstance(token, str) else None
    sanitized_captcha = (captcha.strip() or None) if isinstance(captcha, str) else None

    try:
        logger.info(
            "Creating reply for comment_id=%s (captcha=%s)",
            comment_id,
            bool(sanitized_captcha),
        )
        raw_comment = await search_client.reply_to_comment(
            comment_id,
            sanitized_token,
            sanitized_content,
            sanitized_captcha,
        )
    except httpx.HTTPStatusError as exc:  # pragma: no cover - network errors
        # Map the common auth/not-found statuses to actionable messages;
        # a dedicated 404 branch mirrors reply_to_post so a missing target
        # is reported explicitly rather than as a generic HTTP error.
        status_code = exc.response.status_code
        if status_code == 401:
            message = (
                "Authentication failed while replying to comment "
                f"{comment_id}. Please verify the token."
            )
        elif status_code == 403:
            message = (
                "The provided token is not authorized to reply to comment "
                f"{comment_id}."
            )
        elif status_code == 404:
            message = f"Comment {comment_id} was not found."
        else:
            message = (
                "OpenIsle backend returned HTTP "
                f"{status_code} while replying to comment {comment_id}."
            )
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc
    except httpx.RequestError as exc:  # pragma: no cover - network errors
        message = (
            "Unable to reach OpenIsle backend comment service: "
            f"{exc}."
        )
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    try:
        comment = CommentData.model_validate(raw_comment)
    except ValidationError as exc:
        message = "Received malformed data from the reply comment endpoint."
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    if ctx is not None:
        await ctx.info(
            "Reply created successfully for comment "
            f"{comment_id}."
        )
    logger.debug(
        "Validated reply payload for comment_id=%s (reply_id=%s)",
        comment_id,
        comment.id,
    )

    return CommentReplyResult(comment=comment)
|
|
||||||
|
|
||||||
|
|
||||||
@app.tool(
    name="recent_posts",
    description="Retrieve posts created in the last N minutes.",
    structured_output=True,
)
async def recent_posts(
    minutes: Annotated[
        int,
        PydanticField(gt=0, le=1440, description="Time window in minutes to search for new posts."),
    ],
    ctx: Context | None = None,
) -> RecentPostsResponse:
    """Fetch posts created within the last ``minutes`` minutes.

    Args:
        minutes: Lookback window in minutes (1..1440).
        ctx: Optional MCP context used for error/info reporting.

    Returns:
        RecentPostsResponse with the window, total count, and validated posts.

    Raises:
        ValueError: If the backend rejects or cannot be reached, or the
            response payload fails validation.
    """

    try:
        logger.info("Fetching recent posts for last %s minutes", minutes)
        payload = await search_client.recent_posts(minutes)
    except httpx.HTTPStatusError as exc:  # pragma: no cover - network errors
        message = (
            f"OpenIsle backend returned HTTP {exc.response.status_code} "
            f"while fetching recent posts for the last {minutes} minutes."
        )
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc
    except httpx.RequestError as exc:  # pragma: no cover - network errors
        message = f"Unable to reach OpenIsle backend recent posts service: {exc}."
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    # Validate each entry individually; one malformed entry fails the batch.
    try:
        posts = []
        for entry in payload:
            posts.append(PostSummary.model_validate(entry))
    except ValidationError as exc:
        message = "Received malformed data from the recent posts endpoint."
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    total = len(posts)
    if ctx is not None:
        await ctx.info(
            f"Found {total} posts created within the last {minutes} minutes."
        )
    logger.debug(
        "Validated %d recent posts for window=%s minutes",
        total,
        minutes,
    )

    return RecentPostsResponse(minutes=minutes, total=total, posts=posts)
|
|
||||||
|
|
||||||
|
|
||||||
@app.tool(
    name="get_post",
    description="Retrieve detailed information for a single post.",
    structured_output=True,
)
async def get_post(
    post_id: Annotated[
        int,
        PydanticField(ge=1, description="Identifier of the post to retrieve."),
    ],
    token: Annotated[
        str | None,
        PydanticField(
            default=None,
            description="Optional JWT bearer token to view the post as an authenticated user.",
        ),
    ] = None,
    ctx: Context | None = None,
) -> PostDetail:
    """Fetch a single post from the backend and validate the response.

    Args:
        post_id: Identifier of the post to retrieve.
        token: Optional JWT bearer token; blank or missing means anonymous.
        ctx: Optional MCP context used for error/info reporting.

    Returns:
        The validated PostDetail payload.

    Raises:
        ValueError: If the backend rejects or cannot be reached, or the
            response payload fails validation.
    """

    # A blank or missing token is treated as "view anonymously".
    sanitized_token = (token.strip() or None) if isinstance(token, str) else None

    try:
        logger.info("Fetching post details for post_id=%s", post_id)
        payload = await search_client.get_post(post_id, sanitized_token)
    except httpx.HTTPStatusError as exc:  # pragma: no cover - network errors
        status_code = exc.response.status_code
        # Dispatch well-known statuses to specific messages; default to a
        # generic HTTP-status report for everything else.
        known_errors = {
            404: f"Post {post_id} was not found.",
            401: "Authentication failed while retrieving the post.",
            403: "The provided token is not authorized to view this post.",
        }
        message = known_errors.get(
            status_code,
            f"OpenIsle backend returned HTTP {status_code} while retrieving post {post_id}.",
        )
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc
    except httpx.RequestError as exc:  # pragma: no cover - network errors
        message = f"Unable to reach OpenIsle backend post service: {exc}."
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    try:
        post = PostDetail.model_validate(payload)
    except ValidationError as exc:
        message = "Received malformed data from the post detail endpoint."
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    if ctx is not None:
        await ctx.info(f"Retrieved post {post_id} successfully.")
    logger.debug(
        "Validated post payload for post_id=%s with %d comments",
        post_id,
        len(post.comments),
    )

    return post
|
|
||||||
|
|
||||||
|
|
||||||
@app.tool(
    name="list_unread_messages",
    description="List unread notification messages for the authenticated user.",
    structured_output=True,
)
async def list_unread_messages(
    page: Annotated[
        int,
        PydanticField(
            default=0,
            ge=0,
            description="Page number of unread notifications to retrieve.",
        ),
    ] = 0,
    size: Annotated[
        int,
        PydanticField(
            default=30,
            ge=1,
            le=100,
            description="Number of unread notifications to include per page.",
        ),
    ] = 30,
    token: Annotated[
        str | None,
        PydanticField(
            default=None,
            description=(
                "Optional JWT bearer token. When omitted the configured access token is used."
            ),
        ),
    ] = None,
    ctx: Context | None = None,
) -> UnreadNotificationsResponse:
    """Retrieve unread notifications and return structured data.

    Args:
        page: Zero-based page number of unread notifications to retrieve.
        size: Page size (1..100).
        token: Optional JWT bearer token; a missing or blank value falls back
            to the configured access token.
        ctx: Optional MCP context used for error/info reporting.

    Returns:
        UnreadNotificationsResponse with paging info and validated entries.

    Raises:
        ValueError: If the backend rejects or cannot be reached, or the
            response payload fails validation.
    """

    # Normalize a blank token to None so the HTTP client falls back to the
    # configured access token (consistent with get_post's handling of "").
    sanitized_token = (token.strip() or None) if isinstance(token, str) else None

    try:
        logger.info(
            "Fetching unread notifications (page=%s, size=%s)",
            page,
            size,
        )
        raw_notifications = await search_client.list_unread_notifications(
            page=page,
            size=size,
            token=sanitized_token,
        )
    except httpx.HTTPStatusError as exc:  # pragma: no cover - network errors
        message = (
            "OpenIsle backend returned HTTP "
            f"{exc.response.status_code} while fetching unread notifications."
        )
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc
    except httpx.RequestError as exc:  # pragma: no cover - network errors
        message = f"Unable to reach OpenIsle backend notification service: {exc}."
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    try:
        notifications = [
            NotificationData.model_validate(entry) for entry in raw_notifications
        ]
    except ValidationError as exc:
        message = "Received malformed data from the unread notifications endpoint."
        if ctx is not None:
            await ctx.error(message)
        raise ValueError(message) from exc

    total = len(notifications)
    if ctx is not None:
        await ctx.info(
            f"Retrieved {total} unread notifications (page {page}, size {size})."
        )
    logger.debug(
        "Validated %d unread notifications for page=%s size=%s",
        total,
        page,
        size,
    )

    return UnreadNotificationsResponse(
        page=page,
        size=size,
        total=total,
        notifications=notifications,
    )
|
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
    """Entry point: start the MCP server on the configured transport."""

    # The transport (e.g. stdio or HTTP) comes from application settings.
    transport = settings.transport
    app.run(transport=transport)
|
|
||||||
|
|
||||||
|
|
||||||
# Allow running the module directly as a script.
if __name__ == "__main__":  # pragma: no cover - manual execution
    main()
|
|
||||||
|
|
||||||
|
|||||||
@@ -100,28 +100,10 @@ server {
|
|||||||
# auth_basic_user_file /etc/nginx/.htpasswd;
|
# auth_basic_user_file /etc/nginx/.htpasswd;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------- WEBSOCKET GATEWAY TO :8082 ----------
|
||||||
location ^~ /websocket/ {
|
location ^~ /websocket/ {
|
||||||
proxy_pass http://127.0.0.1:8084/;
|
proxy_pass http://127.0.0.1:8084/;
|
||||||
proxy_http_version 1.1;
|
|
||||||
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection $connection_upgrade;
|
|
||||||
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
proxy_set_header X-Forwarded-Host $host;
|
|
||||||
|
|
||||||
proxy_read_timeout 300s;
|
|
||||||
proxy_send_timeout 300s;
|
|
||||||
proxy_buffering off;
|
|
||||||
proxy_cache off;
|
|
||||||
add_header Cache-Control "no-store" always;
|
|
||||||
}
|
|
||||||
|
|
||||||
location /mcp {
|
|
||||||
proxy_pass http://127.0.0.1:8085;
|
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
|||||||
@@ -8,8 +8,11 @@ server {
|
|||||||
listen 443 ssl;
|
listen 443 ssl;
|
||||||
server_name staging.open-isle.com www.staging.open-isle.com;
|
server_name staging.open-isle.com www.staging.open-isle.com;
|
||||||
|
|
||||||
|
|
||||||
ssl_certificate /etc/letsencrypt/live/staging.open-isle.com/fullchain.pem;
|
ssl_certificate /etc/letsencrypt/live/staging.open-isle.com/fullchain.pem;
|
||||||
ssl_certificate_key /etc/letsencrypt/live/staging.open-isle.com/privkey.pem;
|
ssl_certificate_key /etc/letsencrypt/live/staging.open-isle.com/privkey.pem;
|
||||||
|
# ssl_certificate /etc/letsencrypt/live/open-isle.com/fullchain.pem;
|
||||||
|
# ssl_certificate_key /etc/letsencrypt/live/open-isle.com/privkey.pem;
|
||||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||||
|
|
||||||
@@ -37,13 +40,59 @@ server {
|
|||||||
add_header X-Upstream $upstream_addr always;
|
add_header X-Upstream $upstream_addr always;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# 1) 原生 WebSocket
|
||||||
|
location ^~ /api/ws {
|
||||||
|
proxy_pass http://127.0.0.1:8081; # 不要尾随 /,保留原样 URI
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
|
||||||
|
# 升级所需
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection $connection_upgrade;
|
||||||
|
|
||||||
|
# 统一透传这些头(你在 /api/ 有,/api/ws 也要有)
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header X-Forwarded-Host $host;
|
||||||
|
|
||||||
|
proxy_read_timeout 300s;
|
||||||
|
proxy_send_timeout 300s;
|
||||||
|
proxy_buffering off;
|
||||||
|
proxy_cache off;
|
||||||
|
}
|
||||||
|
|
||||||
|
# 2) SockJS(包含 /info、/iframe.html、/.../websocket 等)
|
||||||
|
location ^~ /api/sockjs {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection $connection_upgrade;
|
||||||
|
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header X-Forwarded-Host $host;
|
||||||
|
|
||||||
|
proxy_read_timeout 300s;
|
||||||
|
proxy_send_timeout 300s;
|
||||||
|
proxy_buffering off;
|
||||||
|
proxy_cache off;
|
||||||
|
|
||||||
|
# 如要同源 iframe 回退,下面两行二选一(或者交给 Spring Security 的 sameOrigin)
|
||||||
|
# proxy_hide_header X-Frame-Options;
|
||||||
|
# add_header X-Frame-Options "SAMEORIGIN" always;
|
||||||
|
}
|
||||||
|
|
||||||
# ---------- API ----------
|
# ---------- API ----------
|
||||||
location /api/ {
|
location /api/ {
|
||||||
proxy_pass http://127.0.0.1:8081/api/;
|
proxy_pass http://127.0.0.1:8081/api/;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection $connection_upgrade;
|
proxy_set_header Connection $connection_upgrade;
|
||||||
|
|
||||||
|
|
||||||
proxy_set_header Host $host;
|
proxy_set_header Host $host;
|
||||||
@@ -60,6 +109,7 @@ server {
|
|||||||
proxy_cache_bypass 1;
|
proxy_cache_bypass 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# ---------- WEBSOCKET GATEWAY TO :8083 ----------
|
||||||
location ^~ /websocket/ {
|
location ^~ /websocket/ {
|
||||||
proxy_pass http://127.0.0.1:8083/;
|
proxy_pass http://127.0.0.1:8083/;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
@@ -80,24 +130,4 @@ server {
|
|||||||
add_header Cache-Control "no-store" always;
|
add_header Cache-Control "no-store" always;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /mcp {
|
}
|
||||||
proxy_pass http://127.0.0.1:8086;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection $connection_upgrade;
|
|
||||||
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
proxy_set_header X-Forwarded-Host $host;
|
|
||||||
|
|
||||||
proxy_read_timeout 300s;
|
|
||||||
proxy_send_timeout 300s;
|
|
||||||
proxy_buffering off;
|
|
||||||
proxy_cache off;
|
|
||||||
add_header Cache-Control "no-store" always;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|||||||
Reference in New Issue
Block a user