Compare commits

...

26 Commits

Author SHA1 Message Date
Tim
adfc05b9b2 feat: add admin point grants and history UI 2025-09-30 20:11:45 +08:00
Tim
3b92bdaf2a Merge pull request #1038 from smallclover/main
Adjust button styles
2025-09-30 10:46:07 +08:00
smallclover
f872a32410 Adjust button styles
1. Change the text to white
2. Unify the button style with the other buttons
2025-09-29 21:47:12 +09:00
tim
0119605649 feat: comment out the daily scheduled build for now 2025-09-29 01:14:50 +08:00
Tim
774611f3a8 Merge pull request #1033 from nagisa77/feature/open_search
Feature: Open Search
2025-09-28 19:19:21 +08:00
Tim
04616a30f3 fix: add port specification 2025-09-28 19:13:17 +08:00
Tim
c0ca615439 feat: add Docker deployment information 2025-09-28 18:05:49 +08:00
Tim
b0597d34b6 fix: remove unused code 2025-09-28 17:58:58 +08:00
Tim
e3f680ad0f fix: fine-tune index/query rules 2025-09-28 17:58:10 +08:00
Tim
c8a1e6d8c8 fix: disable first-letter matching 2025-09-28 15:21:02 +08:00
Tim
ffebeb46b7 fix: add pinyin support 2025-09-28 15:08:20 +08:00
Tim
2977d2898f fix: backend highlighting 2025-09-28 14:55:56 +08:00
Tim
8869121bcb fix: add pinyin 2025-09-28 14:28:45 +08:00
Tim
61f6e7c90a Merge pull request #1034 from smallclover/main
UI adjustments
2025-09-28 10:06:28 +08:00
smallclover
892aa6a7c6 UI adjustments
https://github.com/nagisa77/OpenIsle/issues/855
2025-09-27 08:59:11 +09:00
tim
23cc2d1606 feat: add post reindexing 2025-09-26 18:19:29 +08:00
tim
44addd2a7b fix: get the main search path working 2025-09-26 18:03:25 +08:00
tim
0bc65077df feat: opensearch init 2025-09-26 16:37:13 +08:00
tim
69869348f6 Revert "feat: add open search support"
This reverts commit 4821b77c17.
2025-09-26 15:36:31 +08:00
Tim
4821b77c17 feat: add open search support 2025-09-26 15:34:06 +08:00
Tim
4fc7c861ee Merge pull request #1030 from nagisa77/codex/add-op/-identifier-in-posts
feat: add OP badge to post comments
2025-09-25 13:37:28 +08:00
Tim
81dfddf6e1 feat: highlight post author in comments 2025-09-25 13:34:25 +08:00
Tim
8b93aa95cf Merge pull request #1027 from nagisa77/feature/avatar_count
fix: mobile avatar display issue #1023
2025-09-24 16:58:29 +08:00
tim
425fc7d2b1 fix: mobile avatar display issue #1023 2025-09-24 16:57:42 +08:00
Tim
0fff73b682 Merge pull request #1025 from nagisa77/codex/add-pagination-support-for-tags-qfn36n
feat: paginate tags across backend and ui
2025-09-24 16:18:09 +08:00
Tim
e1171212d7 Merge pull request #1026 from nagisa77/codex/fix-dropdown-to-scroll-after-loading-more
fix: keep dropdown at bottom after loading more
2025-09-24 16:15:14 +08:00
45 changed files with 1813 additions and 73 deletions

View File

@@ -2,8 +2,8 @@ name: CI & CD
on:
workflow_dispatch:
schedule:
- cron: "0 19 * * *" # daily at 19:00 UTC (03:00 Beijing time the next day)
# schedule:
# - cron: "0 19 * * *" # daily at 19:00 UTC (03:00 Beijing time the next day)
jobs:
build-and-deploy:

View File

@@ -132,6 +132,19 @@
<artifactId>springdoc-openapi-starter-webmvc-api</artifactId>
<version>2.2.0</version>
</dependency>
<!-- High-level Java client -->
<dependency>
<groupId>org.opensearch.client</groupId>
<artifactId>opensearch-java</artifactId>
<version>3.2.0</version>
</dependency>
<!-- Low-level RestClient (provides org.opensearch.client.RestClient for the RestClientTransport) -->
<dependency>
<groupId>org.opensearch.client</groupId>
<artifactId>opensearch-rest-client</artifactId>
<version>3.2.0</version>
</dependency>
</dependencies>
<build>

View File

@@ -0,0 +1,35 @@
package com.openisle.controller;
import com.openisle.dto.AdminGrantPointRequest;
import com.openisle.service.PointService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import org.springframework.security.core.Authentication;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/api/admin/points")
@RequiredArgsConstructor
public class AdminPointController {
private final PointService pointService;
@PostMapping("/grant")
@SecurityRequirement(name = "JWT")
@Operation(summary = "Grant points", description = "Grant points to a user as administrator")
@ApiResponse(responseCode = "200", description = "Points granted")
public Map<String, Object> grant(
@RequestBody AdminGrantPointRequest request,
Authentication auth
) {
String username = request.getUsername();
int balance = pointService.grantPointByAdmin(auth.getName(), username, request.getAmount());
return Map.of("username", username.trim(), "point", balance);
}
}
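
For reference, a minimal sketch of how the new grant endpoint could be called from plain Java. The base URL, the JWT placeholder, and the username/amount values are illustrative assumptions; the request body mirrors AdminGrantPointRequest and the response shape matches the Map returned by the controller above.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class AdminGrantPointExample {
  public static void main(String[] args) throws Exception {
    // Illustrative values only; the endpoint requires an administrator JWT.
    String json = "{\"username\":\"alice\",\"amount\":100}";
    HttpRequest request = HttpRequest.newBuilder()
      .uri(URI.create("https://www.open-isle.com/api/admin/points/grant"))
      .header("Authorization", "Bearer <admin-jwt>")
      .header("Content-Type", "application/json")
      .POST(HttpRequest.BodyPublishers.ofString(json))
      .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
      .send(request, HttpResponse.BodyHandlers.ofString());
    // Expected body shape: {"username":"alice","point":<new balance>}
    System.out.println(response.statusCode() + " " + response.body());
  }
}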

View File

@@ -115,6 +115,9 @@ public class SearchController {
dto.setSubText(r.subText());
dto.setExtra(r.extra());
dto.setPostId(r.postId());
dto.setHighlightedText(r.highlightedText());
dto.setHighlightedSubText(r.highlightedSubText());
dto.setHighlightedExtra(r.highlightedExtra());
return dto;
})
.collect(Collectors.toList());

View File

@@ -0,0 +1,12 @@
package com.openisle.dto;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class AdminGrantPointRequest {
private String username;
private int amount;
}

View File

@@ -12,4 +12,7 @@ public class SearchResultDto {
private String subText;
private String extra;
private Long postId;
private String highlightedText;
private String highlightedSubText;
private String highlightedExtra;
}

View File

@@ -13,4 +13,5 @@ public enum PointHistoryType {
REDEEM,
LOTTERY_JOIN,
LOTTERY_REWARD,
ADMIN_GRANT,
}

View File

@@ -0,0 +1,14 @@
package com.openisle.search;
public class NoopSearchIndexer implements SearchIndexer {
@Override
public void indexDocument(String index, SearchDocument document) {
// no-op
}
@Override
public void deleteDocument(String index, Long id) {
// no-op
}
}

View File

@@ -0,0 +1,78 @@
package com.openisle.search;
import org.apache.hc.client5.http.auth.AuthScope;
import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
import org.apache.hc.core5.http.HttpHost;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestClientBuilder;
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.transport.rest_client.RestClientTransport;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.StringUtils;
@Configuration
@EnableConfigurationProperties(OpenSearchProperties.class)
public class OpenSearchConfig {
@Bean(destroyMethod = "close")
@ConditionalOnProperty(prefix = "app.search", name = "enabled", havingValue = "true")
public RestClient openSearchRestClient(OpenSearchProperties properties) {
RestClientBuilder builder = RestClient.builder(
new HttpHost(properties.getScheme(), properties.getHost(), properties.getPort())
);
if (StringUtils.hasText(properties.getUsername())) {
BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(
new AuthScope(properties.getHost(), properties.getPort()),
new UsernamePasswordCredentials(
properties.getUsername(),
properties.getPassword() != null ? properties.getPassword().toCharArray() : new char[0]
)
);
builder.setHttpClientConfigCallback(httpClientBuilder ->
httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)
);
}
return builder.build();
}
@Bean(destroyMethod = "close")
@ConditionalOnBean(RestClient.class)
public RestClientTransport openSearchTransport(RestClient restClient) {
return new RestClientTransport(restClient, new JacksonJsonpMapper());
}
@Bean
@ConditionalOnBean(RestClientTransport.class)
public OpenSearchClient openSearchClient(RestClientTransport transport) {
return new OpenSearchClient(transport);
}
@Bean
@ConditionalOnBean(OpenSearchClient.class)
public SearchIndexInitializer searchIndexInitializer(
OpenSearchClient client,
OpenSearchProperties properties
) {
return new SearchIndexInitializer(client, properties);
}
@Bean
@ConditionalOnBean(OpenSearchClient.class)
public SearchIndexer openSearchIndexer(OpenSearchClient client, OpenSearchProperties properties) {
return new OpenSearchIndexer(client);
}
@Bean
@ConditionalOnMissingBean(SearchIndexer.class)
public SearchIndexer noopSearchIndexer() {
return new NoopSearchIndexer();
}
}

View File

@@ -0,0 +1,49 @@
package com.openisle.search;
import java.io.IOException;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.opensearch.core.DeleteRequest;
import org.opensearch.client.opensearch.core.IndexRequest;
import org.opensearch.client.opensearch.core.IndexResponse;
@Slf4j
@RequiredArgsConstructor
public class OpenSearchIndexer implements SearchIndexer {
private final OpenSearchClient client;
@Override
public void indexDocument(String index, SearchDocument document) {
if (document == null || document.entityId() == null) {
return;
}
try {
IndexRequest<SearchDocument> request = IndexRequest.of(builder ->
builder.index(index).id(document.entityId().toString()).document(document)
);
IndexResponse response = client.index(request);
log.info(
"Indexed document {} into {} with result {}",
document.entityId(),
index,
response.result()
);
} catch (IOException e) {
log.warn("Failed to index document {} into {}", document.entityId(), index, e);
}
}
@Override
public void deleteDocument(String index, Long id) {
if (id == null) {
return;
}
try {
client.delete(DeleteRequest.of(builder -> builder.index(index).id(id.toString())));
} catch (IOException e) {
log.warn("Failed to delete document {} from {}", id, index, e);
}
}
}

View File

@@ -0,0 +1,63 @@
package com.openisle.search;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
@Getter
@Setter
@ConfigurationProperties(prefix = "app.search")
public class OpenSearchProperties {
private boolean enabled = false;
private String host = "localhost";
private int port = 9200;
private String scheme = "http";
private String username;
private String password;
private String indexPrefix = "openisle";
private boolean initialize = true;
private int highlightFragmentSize = 200;
private boolean reindexOnStartup = false;
private int reindexBatchSize = 500;
private Indices indices = new Indices();
public String postsIndex() {
return indexName(indices.posts);
}
public String commentsIndex() {
return indexName(indices.comments);
}
public String usersIndex() {
return indexName(indices.users);
}
public String categoriesIndex() {
return indexName(indices.categories);
}
public String tagsIndex() {
return indexName(indices.tags);
}
private String indexName(String suffix) {
if (indexPrefix == null || indexPrefix.isBlank()) {
return suffix;
}
return indexPrefix + "-" + suffix;
}
@Getter
@Setter
public static class Indices {
private String posts = "posts";
private String comments = "comments";
private String users = "users";
private String categories = "categories";
private String tags = "tags";
}
}
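
A small illustration (a sketch, using only the defaults defined above) of how the prefix and the Indices suffixes combine into index names:

import com.openisle.search.OpenSearchProperties;

public class IndexNameDemo {
  public static void main(String[] args) {
    OpenSearchProperties props = new OpenSearchProperties();
    // Defaults: indexPrefix = "openisle", indices.posts = "posts", indices.tags = "tags"
    System.out.println(props.postsIndex()); // -> openisle-posts
    System.out.println(props.tagsIndex());  // -> openisle-tags
    props.setIndexPrefix("");               // a blank prefix drops the "openisle-" part
    System.out.println(props.postsIndex()); // -> posts
  }
}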

View File

@@ -0,0 +1,15 @@
package com.openisle.search;
import java.util.List;
public record SearchDocument(
String type,
Long entityId,
String title,
String content,
String author,
String category,
List<String> tags,
Long postId,
Long createdAt
) {}

View File

@@ -0,0 +1,127 @@
package com.openisle.search;
import com.openisle.model.Category;
import com.openisle.model.Comment;
import com.openisle.model.Post;
import com.openisle.model.Tag;
import com.openisle.model.User;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public final class SearchDocumentFactory {
private SearchDocumentFactory() {}
public static SearchDocument fromPost(Post post) {
if (post == null || post.getId() == null) {
return null;
}
List<String> tags = post.getTags() == null
? Collections.emptyList()
: post
.getTags()
.stream()
.map(Tag::getName)
.filter(Objects::nonNull)
.collect(Collectors.toList());
return new SearchDocument(
"post",
post.getId(),
post.getTitle(),
post.getContent(),
post.getAuthor() != null ? post.getAuthor().getUsername() : null,
post.getCategory() != null ? post.getCategory().getName() : null,
tags,
post.getId(),
toEpochMillis(post.getCreatedAt())
);
}
public static SearchDocument fromComment(Comment comment) {
if (comment == null || comment.getId() == null) {
return null;
}
Post post = comment.getPost();
List<String> tags = post == null || post.getTags() == null
? Collections.emptyList()
: post
.getTags()
.stream()
.map(Tag::getName)
.filter(Objects::nonNull)
.collect(Collectors.toList());
return new SearchDocument(
"comment",
comment.getId(),
post != null ? post.getTitle() : null,
comment.getContent(),
comment.getAuthor() != null ? comment.getAuthor().getUsername() : null,
post != null && post.getCategory() != null ? post.getCategory().getName() : null,
tags,
post != null ? post.getId() : null,
toEpochMillis(comment.getCreatedAt())
);
}
public static SearchDocument fromUser(User user) {
if (user == null || user.getId() == null) {
return null;
}
return new SearchDocument(
"user",
user.getId(),
user.getUsername(),
user.getIntroduction(),
null,
null,
Collections.emptyList(),
null,
toEpochMillis(user.getCreatedAt())
);
}
public static SearchDocument fromCategory(Category category) {
if (category == null || category.getId() == null) {
return null;
}
return new SearchDocument(
"category",
category.getId(),
category.getName(),
category.getDescription(),
null,
null,
Collections.emptyList(),
null,
null
);
}
public static SearchDocument fromTag(Tag tag) {
if (tag == null || tag.getId() == null) {
return null;
}
return new SearchDocument(
"tag",
tag.getId(),
tag.getName(),
tag.getDescription(),
null,
null,
Collections.emptyList(),
null,
toEpochMillis(tag.getCreatedAt())
);
}
private static Long toEpochMillis(LocalDateTime time) {
if (time == null) {
return null;
}
return time.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
}
}

View File

@@ -0,0 +1,33 @@
package com.openisle.search;
import com.openisle.search.event.DeleteDocumentEvent;
import com.openisle.search.event.IndexDocumentEvent;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.transaction.event.TransactionPhase;
import org.springframework.transaction.event.TransactionalEventListener;
@Slf4j
@Component
@RequiredArgsConstructor
public class SearchIndexEventListener {
private final SearchIndexer searchIndexer;
@TransactionalEventListener(phase = TransactionPhase.AFTER_COMMIT, fallbackExecution = true)
public void handleIndex(IndexDocumentEvent event) {
if (event == null || event.document() == null) {
return;
}
searchIndexer.indexDocument(event.index(), event.document());
}
@TransactionalEventListener(phase = TransactionPhase.AFTER_COMMIT, fallbackExecution = true)
public void handleDelete(DeleteDocumentEvent event) {
if (event == null) {
return;
}
searchIndexer.deleteDocument(event.index(), event.id());
}
}
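
Note that fallbackExecution = true makes Spring run these listeners immediately when an event is published outside an active transaction; within a transaction they still fire only after a successful commit, so rolled-back writes are never indexed.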

View File

@@ -0,0 +1,99 @@
package com.openisle.search;
import com.openisle.model.Category;
import com.openisle.model.Comment;
import com.openisle.model.Post;
import com.openisle.model.PostStatus;
import com.openisle.model.Tag;
import com.openisle.model.User;
import com.openisle.search.event.DeleteDocumentEvent;
import com.openisle.search.event.IndexDocumentEvent;
import lombok.RequiredArgsConstructor;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.stereotype.Component;
@Component
@RequiredArgsConstructor
public class SearchIndexEventPublisher {
private final ApplicationEventPublisher publisher;
private final OpenSearchProperties properties;
public void publishPostSaved(Post post) {
if (!properties.isEnabled() || post == null || post.getStatus() != PostStatus.PUBLISHED) {
return;
}
SearchDocument document = SearchDocumentFactory.fromPost(post);
if (document != null) {
publisher.publishEvent(new IndexDocumentEvent(properties.postsIndex(), document));
}
}
public void publishPostDeleted(Long postId) {
if (!properties.isEnabled() || postId == null) {
return;
}
publisher.publishEvent(new DeleteDocumentEvent(properties.postsIndex(), postId));
}
public void publishCommentSaved(Comment comment) {
if (!properties.isEnabled() || comment == null) {
return;
}
SearchDocument document = SearchDocumentFactory.fromComment(comment);
if (document != null) {
publisher.publishEvent(new IndexDocumentEvent(properties.commentsIndex(), document));
}
}
public void publishCommentDeleted(Long commentId) {
if (!properties.isEnabled() || commentId == null) {
return;
}
publisher.publishEvent(new DeleteDocumentEvent(properties.commentsIndex(), commentId));
}
public void publishUserSaved(User user) {
if (!properties.isEnabled() || user == null) {
return;
}
SearchDocument document = SearchDocumentFactory.fromUser(user);
if (document != null) {
publisher.publishEvent(new IndexDocumentEvent(properties.usersIndex(), document));
}
}
public void publishCategorySaved(Category category) {
if (!properties.isEnabled() || category == null) {
return;
}
SearchDocument document = SearchDocumentFactory.fromCategory(category);
if (document != null) {
publisher.publishEvent(new IndexDocumentEvent(properties.categoriesIndex(), document));
}
}
public void publishCategoryDeleted(Long categoryId) {
if (!properties.isEnabled() || categoryId == null) {
return;
}
publisher.publishEvent(new DeleteDocumentEvent(properties.categoriesIndex(), categoryId));
}
public void publishTagSaved(Tag tag) {
if (!properties.isEnabled() || tag == null || !tag.isApproved()) {
return;
}
SearchDocument document = SearchDocumentFactory.fromTag(tag);
if (document != null) {
publisher.publishEvent(new IndexDocumentEvent(properties.tagsIndex(), document));
}
}
public void publishTagDeleted(Long tagId) {
if (!properties.isEnabled() || tagId == null) {
return;
}
publisher.publishEvent(new DeleteDocumentEvent(properties.tagsIndex(), tagId));
}
}

View File

@@ -0,0 +1,219 @@
package com.openisle.search;
import jakarta.annotation.PostConstruct;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.opensearch.client.json.JsonData;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.opensearch._types.mapping.Property;
import org.opensearch.client.opensearch._types.mapping.TypeMapping;
import org.opensearch.client.opensearch.indices.IndexSettings;
@Slf4j
@RequiredArgsConstructor
public class SearchIndexInitializer {
private final OpenSearchClient client;
private final OpenSearchProperties properties;
@PostConstruct
public void initialize() {
if (!properties.isEnabled() || !properties.isInitialize()) {
return;
}
ensureIndex(properties.postsIndex(), this::postMapping);
ensureIndex(properties.commentsIndex(), this::commentMapping);
ensureIndex(properties.usersIndex(), this::userMapping);
ensureIndex(properties.categoriesIndex(), this::categoryMapping);
ensureIndex(properties.tagsIndex(), this::tagMapping);
}
private void ensureIndex(String index, java.util.function.Supplier<TypeMapping> mappingSupplier) {
try {
boolean exists = client
.indices()
.exists(builder -> builder.index(index))
.value();
if (exists) {
return;
}
client
.indices()
.create(builder ->
builder.index(index).settings(this::applyPinyinAnalysis).mappings(mappingSupplier.get())
);
log.info("Created OpenSearch index {}", index);
} catch (IOException e) {
log.warn("Failed to initialize OpenSearch index {}", index, e);
}
}
private TypeMapping postMapping() {
return TypeMapping.of(builder ->
builder
.properties("type", Property.of(p -> p.keyword(k -> k)))
.properties("title", textWithRawAndPinyin())
.properties("content", textWithPinyinOnly()) // content 不做 .raw避免超长 keyword
.properties("author", keywordWithRawAndPinyin())
.properties("category", keywordWithRawAndPinyin())
.properties("tags", keywordWithRawAndPinyin())
.properties("postId", Property.of(p -> p.long_(l -> l)))
.properties(
"createdAt",
Property.of(p -> p.date(d -> d.format("strict_date_optional_time||epoch_millis")))
)
);
}
private TypeMapping commentMapping() {
return TypeMapping.of(builder ->
builder
.properties("type", Property.of(p -> p.keyword(k -> k)))
.properties("title", textWithRawAndPinyin())
.properties("content", textWithPinyinOnly())
.properties("author", keywordWithRawAndPinyin())
.properties("category", keywordWithRawAndPinyin())
.properties("tags", keywordWithRawAndPinyin())
.properties("postId", Property.of(p -> p.long_(l -> l)))
.properties(
"createdAt",
Property.of(p -> p.date(d -> d.format("strict_date_optional_time||epoch_millis")))
)
);
}
private TypeMapping userMapping() {
return TypeMapping.of(builder ->
builder
.properties("type", Property.of(p -> p.keyword(k -> k)))
.properties("title", textWithRawAndPinyin())
.properties("content", textWithPinyinOnly())
.properties(
"createdAt",
Property.of(p -> p.date(d -> d.format("strict_date_optional_time||epoch_millis")))
)
);
}
private TypeMapping categoryMapping() {
return TypeMapping.of(builder ->
builder
.properties("type", Property.of(p -> p.keyword(k -> k)))
.properties("title", textWithRawAndPinyin())
.properties("content", textWithPinyinOnly())
);
}
private TypeMapping tagMapping() {
return TypeMapping.of(builder ->
builder
.properties("type", Property.of(p -> p.keyword(k -> k)))
.properties("title", textWithRawAndPinyin())
.properties("content", textWithPinyinOnly())
.properties(
"createdAt",
Property.of(p -> p.date(d -> d.format("strict_date_optional_time||epoch_millis")))
)
);
}
/** Text fields: .raw (keyword, exact) + .py (pinyin phrase, exact) + .zh (ICU + 2-3 gram, for recall) */
private Property textWithRawAndPinyin() {
return Property.of(p ->
p.text(t ->
t
.fields("raw", f -> f.keyword(k -> k.normalizer("lowercase_normalizer")))
.fields("py", f -> f.text(sub -> sub.analyzer("py_index").searchAnalyzer("py_search")))
.fields("zh", f ->
f.text(sub -> sub.analyzer("zh_ngram_index").searchAnalyzer("zh_search"))
)
)
);
}
/** Long content field: keep the pinyin sub-field and add a zh sub-field (no .raw, to avoid oversized keyword values) */
private Property textWithPinyinOnly() {
return Property.of(p ->
p.text(t ->
t
.fields("py", f -> f.text(sub -> sub.analyzer("py_index").searchAnalyzer("py_search")))
.fields("zh", f ->
f.text(sub -> sub.analyzer("zh_ngram_index").searchAnalyzer("zh_search"))
)
)
);
}
/** Keyword fields (author/category/tags): keyword equality + .py + .zh, aligned with the title strategy */
private Property keywordWithRawAndPinyin() {
return Property.of(p ->
p.keyword(k ->
k
.normalizer("lowercase_normalizer")
.fields("raw", f -> f.keyword(kk -> kk.normalizer("lowercase_normalizer")))
.fields("py", f -> f.text(sub -> sub.analyzer("py_index").searchAnalyzer("py_search")))
.fields("zh", f ->
f.text(sub -> sub.analyzer("zh_ngram_index").searchAnalyzer("zh_search"))
)
)
);
}
/** Add the zh analyzers (ICU tokenizer + 2-3 gram) while keeping the existing pinyin/normalizer settings */
private IndexSettings.Builder applyPinyinAnalysis(IndexSettings.Builder builder) {
Map<String, JsonData> settings = new LinkedHashMap<>();
// --- Existing: keyword normalizer (used by the .raw sub-fields)
settings.put("analysis.normalizer.lowercase_normalizer.type", JsonData.of("custom"));
settings.put(
"analysis.normalizer.lowercase_normalizer.filter",
JsonData.of(List.of("lowercase"))
);
// --- Existing: pinyin filter + analyzers
settings.put("analysis.filter.py_filter.type", JsonData.of("pinyin"));
settings.put("analysis.filter.py_filter.keep_full_pinyin", JsonData.of(true));
settings.put("analysis.filter.py_filter.keep_joined_full_pinyin", JsonData.of(true));
settings.put("analysis.filter.py_filter.keep_first_letter", JsonData.of(false));
settings.put("analysis.filter.py_filter.remove_duplicated_term", JsonData.of(true));
settings.put("analysis.analyzer.py_index.type", JsonData.of("custom"));
settings.put("analysis.analyzer.py_index.tokenizer", JsonData.of("standard"));
settings.put(
"analysis.analyzer.py_index.filter",
JsonData.of(List.of("lowercase", "py_filter"))
);
settings.put("analysis.analyzer.py_search.type", JsonData.of("custom"));
settings.put("analysis.analyzer.py_search.tokenizer", JsonData.of("standard"));
settings.put(
"analysis.analyzer.py_search.filter",
JsonData.of(List.of("lowercase", "py_filter"))
);
settings.put("analysis.filter.zh_ngram_2_3.type", JsonData.of("ngram"));
settings.put("analysis.filter.zh_ngram_2_3.min_gram", JsonData.of(2));
settings.put("analysis.filter.zh_ngram_2_3.max_gram", JsonData.of(3));
settings.put("analysis.analyzer.zh_ngram_index.type", JsonData.of("custom"));
settings.put("analysis.analyzer.zh_ngram_index.tokenizer", JsonData.of("icu_tokenizer"));
settings.put(
"analysis.analyzer.zh_ngram_index.filter",
JsonData.of(List.of("lowercase", "zh_ngram_2_3"))
);
settings.put("analysis.analyzer.zh_search.type", JsonData.of("custom"));
settings.put("analysis.analyzer.zh_search.tokenizer", JsonData.of("icu_tokenizer"));
settings.put(
"analysis.analyzer.zh_search.filter",
JsonData.of(List.of("lowercase", "zh_ngram_2_3"))
);
settings.forEach(builder::customSettings);
return builder;
}
}
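
A practical note on the analysis settings above: icu_tokenizer is provided by OpenSearch's analysis-icu plugin, and the pinyin token filter by the community pinyin analysis plugin, so (assuming the cluster is provisioned that way) both plugins need to be installed before these indices can be created; otherwise index creation fails and only the warning logged by ensureIndex will surface.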

View File

@@ -0,0 +1,6 @@
package com.openisle.search;
public interface SearchIndexer {
void indexDocument(String index, SearchDocument document);
void deleteDocument(String index, Long id);
}

View File

@@ -0,0 +1,30 @@
package com.openisle.search;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
@Slf4j
@Component
@RequiredArgsConstructor
public class SearchReindexInitializer implements CommandLineRunner {
private final OpenSearchProperties properties;
private final SearchReindexService searchReindexService;
@Override
public void run(String... args) {
if (!properties.isEnabled()) {
log.info("Search indexing disabled, skipping startup reindex.");
return;
}
if (!properties.isReindexOnStartup()) {
log.debug("Startup reindex disabled by configuration.");
return;
}
searchReindexService.reindexAll();
}
}

View File

@@ -0,0 +1,94 @@
package com.openisle.search;
import com.openisle.model.Post;
import com.openisle.model.PostStatus;
import com.openisle.model.Tag;
import com.openisle.repository.CategoryRepository;
import com.openisle.repository.CommentRepository;
import com.openisle.repository.PostRepository;
import com.openisle.repository.TagRepository;
import com.openisle.repository.UserRepository;
import java.util.Objects;
import java.util.function.Function;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
@Slf4j
@Component
@RequiredArgsConstructor
public class SearchReindexService {
private final SearchIndexer searchIndexer;
private final OpenSearchProperties properties;
private final PostRepository postRepository;
private final CommentRepository commentRepository;
private final UserRepository userRepository;
private final CategoryRepository categoryRepository;
private final TagRepository tagRepository;
@Transactional(readOnly = true)
public void reindexAll() {
if (!properties.isEnabled()) {
log.info("Search indexing is disabled, skipping reindex operation.");
return;
}
log.info("Starting full search reindex operation.");
reindex(properties.postsIndex(), postRepository::findAll, (Post post) ->
post.getStatus() == PostStatus.PUBLISHED ? SearchDocumentFactory.fromPost(post) : null
);
reindex(
properties.commentsIndex(),
commentRepository::findAll,
SearchDocumentFactory::fromComment
);
reindex(properties.usersIndex(), userRepository::findAll, SearchDocumentFactory::fromUser);
reindex(
properties.categoriesIndex(),
categoryRepository::findAll,
SearchDocumentFactory::fromCategory
);
reindex(properties.tagsIndex(), tagRepository::findAll, (Tag tag) ->
tag.isApproved() ? SearchDocumentFactory.fromTag(tag) : null
);
log.info("Completed full search reindex operation.");
}
private <T> void reindex(
String index,
Function<Pageable, Page<T>> pageSupplier,
Function<T, SearchDocument> mapper
) {
int batchSize = Math.max(1, properties.getReindexBatchSize());
int pageNumber = 0;
Page<T> page;
do {
Pageable pageable = PageRequest.of(pageNumber, batchSize);
page = pageSupplier.apply(pageable);
if (page.isEmpty() && pageNumber == 0) {
log.info("No entities found for index {}.", index);
}
log.info("Reindexing {} entities for index {}.", page.getTotalElements(), index);
for (T entity : page) {
SearchDocument document = mapper.apply(entity);
if (Objects.nonNull(document)) {
searchIndexer.indexDocument(index, document);
}
}
pageNumber++;
} while (page.hasNext());
}
}

View File

@@ -0,0 +1,3 @@
package com.openisle.search.event;
public record DeleteDocumentEvent(String index, Long id) {}

View File

@@ -0,0 +1,5 @@
package com.openisle.search.event;
import com.openisle.search.SearchDocument;
public record IndexDocumentEvent(String index, SearchDocument document) {}

View File

@@ -3,6 +3,7 @@ package com.openisle.service;
import com.openisle.config.CachingConfig;
import com.openisle.model.Category;
import com.openisle.repository.CategoryRepository;
import com.openisle.search.SearchIndexEventPublisher;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.cache.annotation.CacheEvict;
@@ -14,6 +15,7 @@ import org.springframework.stereotype.Service;
public class CategoryService {
private final CategoryRepository categoryRepository;
private final SearchIndexEventPublisher searchIndexEventPublisher;
@CacheEvict(value = CachingConfig.CATEGORY_CACHE_NAME, allEntries = true)
public Category createCategory(String name, String description, String icon, String smallIcon) {
@@ -22,7 +24,9 @@ public class CategoryService {
category.setDescription(description);
category.setIcon(icon);
category.setSmallIcon(smallIcon);
return categoryRepository.save(category);
Category saved = categoryRepository.save(category);
searchIndexEventPublisher.publishCategorySaved(saved);
return saved;
}
@CacheEvict(value = CachingConfig.CATEGORY_CACHE_NAME, allEntries = true)
@@ -48,12 +52,15 @@ public class CategoryService {
if (smallIcon != null) {
category.setSmallIcon(smallIcon);
}
return categoryRepository.save(category);
Category saved = categoryRepository.save(category);
searchIndexEventPublisher.publishCategorySaved(saved);
return saved;
}
@CacheEvict(value = CachingConfig.CATEGORY_CACHE_NAME, allEntries = true)
public void deleteCategory(Long id) {
categoryRepository.deleteById(id);
searchIndexEventPublisher.publishCategoryDeleted(id);
}
public Category getCategory(Long id) {

View File

@@ -16,6 +16,7 @@ import com.openisle.repository.PointHistoryRepository;
import com.openisle.repository.PostRepository;
import com.openisle.repository.ReactionRepository;
import com.openisle.repository.UserRepository;
import com.openisle.search.SearchIndexEventPublisher;
import com.openisle.service.NotificationService;
import com.openisle.service.PointService;
import com.openisle.service.SubscriptionService;
@@ -49,6 +50,7 @@ public class CommentService {
private final PointHistoryRepository pointHistoryRepository;
private final PointService pointService;
private final ImageUploader imageUploader;
private final SearchIndexEventPublisher searchIndexEventPublisher;
@CacheEvict(value = CachingConfig.POST_CACHE_NAME, allEntries = true)
@Transactional
@@ -124,6 +126,7 @@ public class CommentService {
}
notificationService.notifyMentions(content, author, post, comment);
log.debug("addComment finished for comment {}", comment.getId());
searchIndexEventPublisher.publishCommentSaved(comment);
return comment;
}
@@ -221,6 +224,7 @@ public class CommentService {
}
notificationService.notifyMentions(content, author, parent.getPost(), comment);
log.debug("addReply finished for comment {}", comment.getId());
searchIndexEventPublisher.publishCommentSaved(comment);
return comment;
}
@@ -360,7 +364,9 @@ public class CommentService {
// Logically delete the comment
Post post = comment.getPost();
Long commentId = comment.getId();
commentRepository.delete(comment);
searchIndexEventPublisher.publishCommentDeleted(commentId);
// Delete the point history
pointHistoryRepository.deleteAll(pointHistories);

View File

@@ -43,6 +43,22 @@ public class PointService {
return addPoint(user, 500, PointHistoryType.FEATURE, post, null, null);
}
public int grantPointByAdmin(String adminName, String targetUsername, int amount) {
if (amount <= 0) {
throw new FieldException("amount", "积分必须为正数");
}
if (targetUsername == null || targetUsername.isBlank()) {
throw new FieldException("username", "用户名不能为空");
}
String normalizedUsername = targetUsername.trim();
User admin = userRepository.findByUsername(adminName).orElseThrow();
User target = userRepository
.findByUsername(normalizedUsername)
.orElseThrow(() -> new FieldException("username", "用户不存在"));
addPoint(target, amount, PointHistoryType.ADMIN_GRANT, null, null, admin);
return target.getPoint();
}
public void processLotteryJoin(User participant, LotteryPost post) {
int cost = post.getPointCost();
if (cost > 0) {

View File

@@ -16,6 +16,7 @@ import com.openisle.repository.PostSubscriptionRepository;
import com.openisle.repository.ReactionRepository;
import com.openisle.repository.TagRepository;
import com.openisle.repository.UserRepository;
import com.openisle.search.SearchIndexEventPublisher;
import com.openisle.service.EmailSender;
import java.time.Duration;
import java.time.LocalDateTime;
@@ -73,6 +74,8 @@ public class PostService {
private final ConcurrentMap<Long, ScheduledFuture<?>> scheduledFinalizations =
new ConcurrentHashMap<>();
private final SearchIndexEventPublisher searchIndexEventPublisher;
@Value("${app.website-url:https://www.open-isle.com}")
private String websiteUrl;
@@ -103,7 +106,8 @@ public class PostService {
PostChangeLogService postChangeLogService,
PointHistoryRepository pointHistoryRepository,
@Value("${app.post.publish-mode:DIRECT}") PublishMode publishMode,
RedisTemplate redisTemplate
RedisTemplate redisTemplate,
SearchIndexEventPublisher searchIndexEventPublisher
) {
this.postRepository = postRepository;
this.userRepository = userRepository;
@@ -130,6 +134,7 @@ public class PostService {
this.publishMode = publishMode;
this.redisTemplate = redisTemplate;
this.searchIndexEventPublisher = searchIndexEventPublisher;
}
@EventListener(ApplicationReadyEvent.class)
@@ -346,6 +351,9 @@ public class PostService {
);
scheduledFinalizations.put(pp.getId(), future);
}
if (post.getStatus() == PostStatus.PUBLISHED) {
searchIndexEventPublisher.publishPostSaved(post);
}
return post;
}
@@ -868,10 +876,12 @@ public class PostService {
if (!tag.isApproved()) {
tag.setApproved(true);
tagRepository.save(tag);
searchIndexEventPublisher.publishTagSaved(tag);
}
}
post.setStatus(PostStatus.PUBLISHED);
post = postRepository.save(post);
searchIndexEventPublisher.publishPostSaved(post);
notificationService.createNotification(
post.getAuthor(),
NotificationType.POST_REVIEWED,
@@ -895,13 +905,16 @@ public class PostService {
if (!tag.isApproved()) {
long count = postRepository.countDistinctByTags_Id(tag.getId());
if (count <= 1) {
Long tagId = tag.getId();
post.getTags().remove(tag);
tagRepository.delete(tag);
searchIndexEventPublisher.publishTagDeleted(tagId);
}
}
}
post.setStatus(PostStatus.REJECTED);
post = postRepository.save(post);
searchIndexEventPublisher.publishPostDeleted(post.getId());
notificationService.createNotification(
post.getAuthor(),
NotificationType.POST_REVIEWED,
@@ -1042,6 +1055,9 @@ public class PostService {
if (!oldTags.equals(newTags)) {
postChangeLogService.recordTagChange(updated, user, oldTags, newTags);
}
if (updated.getStatus() == PostStatus.PUBLISHED) {
searchIndexEventPublisher.publishPostSaved(updated);
}
return updated;
}
@@ -1094,8 +1110,10 @@ public class PostService {
}
}
String title = post.getTitle();
Long postId = post.getId();
postChangeLogService.deleteLogsForPost(post);
postRepository.delete(post);
searchIndexEventPublisher.publishPostDeleted(postId);
if (adminDeleting) {
notificationService.createNotification(
author,

View File

@@ -11,14 +11,30 @@ import com.openisle.repository.CommentRepository;
import com.openisle.repository.PostRepository;
import com.openisle.repository.TagRepository;
import com.openisle.repository.UserRepository;
import com.openisle.search.OpenSearchProperties;
import com.openisle.search.SearchDocument;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.opensearch._types.FieldValue;
import org.opensearch.client.opensearch._types.query_dsl.TextQueryType;
import org.opensearch.client.opensearch.core.SearchResponse;
import org.opensearch.client.opensearch.core.search.Hit;
import org.springframework.stereotype.Service;
import org.springframework.web.util.HtmlUtils;
@Service
@Slf4j
@RequiredArgsConstructor
public class SearchService {
@@ -27,10 +43,14 @@ public class SearchService {
private final CommentRepository commentRepository;
private final CategoryRepository categoryRepository;
private final TagRepository tagRepository;
private final Optional<OpenSearchClient> openSearchClient;
private final OpenSearchProperties openSearchProperties;
@org.springframework.beans.factory.annotation.Value("${app.snippet-length}")
private int snippetLength;
private static final int DEFAULT_OPEN_SEARCH_LIMIT = 50;
public List<User> searchUsers(String keyword) {
return userRepository.findByUsernameContainingIgnoreCase(keyword);
}
@@ -64,49 +84,113 @@ public class SearchService {
}
public List<SearchResult> globalSearch(String keyword) {
if (keyword == null || keyword.isBlank()) {
return List.of();
}
if (isOpenSearchEnabled()) {
try {
List<SearchResult> results = searchWithOpenSearch(keyword);
if (!results.isEmpty()) {
return results;
}
} catch (IOException e) {
log.warn("OpenSearch global search failed, falling back to database query", e);
}
}
return fallbackGlobalSearch(keyword);
}
private List<SearchResult> fallbackGlobalSearch(String keyword) {
final String effectiveKeyword = keyword == null ? "" : keyword.trim();
Stream<SearchResult> users = searchUsers(keyword)
.stream()
.map(u ->
new SearchResult("user", u.getId(), u.getUsername(), u.getIntroduction(), null, null)
new SearchResult(
"user",
u.getId(),
u.getUsername(),
u.getIntroduction(),
null,
null,
highlightHtml(u.getUsername(), effectiveKeyword),
highlightHtml(u.getIntroduction(), effectiveKeyword),
null
)
);
Stream<SearchResult> categories = searchCategories(keyword)
.stream()
.map(c ->
new SearchResult("category", c.getId(), c.getName(), null, c.getDescription(), null)
new SearchResult(
"category",
c.getId(),
c.getName(),
null,
c.getDescription(),
null,
highlightHtml(c.getName(), effectiveKeyword),
null,
highlightHtml(c.getDescription(), effectiveKeyword)
)
);
Stream<SearchResult> tags = searchTags(keyword)
.stream()
.map(t -> new SearchResult("tag", t.getId(), t.getName(), null, t.getDescription(), null));
.map(t ->
new SearchResult(
"tag",
t.getId(),
t.getName(),
null,
t.getDescription(),
null,
highlightHtml(t.getName(), effectiveKeyword),
null,
highlightHtml(t.getDescription(), effectiveKeyword)
)
);
// Merge post results while removing duplicates between search by content
// and search by title
List<SearchResult> mergedPosts = Stream.concat(
searchPosts(keyword)
.stream()
.map(p ->
new SearchResult(
.map(p -> {
String snippet = extractSnippet(p.getContent(), keyword, false);
return new SearchResult(
"post",
p.getId(),
p.getTitle(),
p.getCategory() != null ? p.getCategory().getName() : null,
extractSnippet(p.getContent(), keyword, false),
null
)
),
snippet,
null,
highlightHtml(p.getTitle(), effectiveKeyword),
highlightHtml(
p.getCategory() != null ? p.getCategory().getName() : null,
effectiveKeyword
),
highlightHtml(snippet, effectiveKeyword)
);
}),
searchPostsByTitle(keyword)
.stream()
.map(p ->
new SearchResult(
.map(p -> {
String snippet = extractSnippet(p.getContent(), keyword, true);
return new SearchResult(
"post_title",
p.getId(),
p.getTitle(),
p.getCategory() != null ? p.getCategory().getName() : null,
extractSnippet(p.getContent(), keyword, true),
null
)
)
snippet,
null,
highlightHtml(p.getTitle(), effectiveKeyword),
highlightHtml(
p.getCategory() != null ? p.getCategory().getName() : null,
effectiveKeyword
),
highlightHtml(snippet, effectiveKeyword)
);
})
)
.collect(
java.util.stream.Collectors.toMap(
@@ -122,22 +206,366 @@ public class SearchService {
Stream<SearchResult> comments = searchComments(keyword)
.stream()
.map(c ->
new SearchResult(
.map(c -> {
String snippet = extractSnippet(c.getContent(), keyword, false);
return new SearchResult(
"comment",
c.getId(),
c.getPost().getTitle(),
c.getAuthor().getUsername(),
extractSnippet(c.getContent(), keyword, false),
c.getPost().getId()
)
);
snippet,
c.getPost().getId(),
highlightHtml(c.getPost().getTitle(), effectiveKeyword),
highlightHtml(c.getAuthor().getUsername(), effectiveKeyword),
highlightHtml(snippet, effectiveKeyword)
);
});
return Stream.of(users, categories, tags, mergedPosts.stream(), comments)
.flatMap(s -> s)
.toList();
}
private boolean isOpenSearchEnabled() {
return openSearchProperties.isEnabled() && openSearchClient.isPresent();
}
// Class-level helper (either a field or a static constant works)
private static final java.util.regex.Pattern HANS_PATTERN = java.util.regex.Pattern.compile(
"\\p{IsHan}"
);
private static boolean containsHan(String s) {
return s != null && HANS_PATTERN.matcher(s).find();
}
private List<SearchResult> searchWithOpenSearch(String keyword) throws IOException {
var client = openSearchClient.orElse(null);
if (client == null) return List.of();
final String qRaw = keyword == null ? "" : keyword.trim();
if (qRaw.isEmpty()) return List.of();
final boolean hasHan = containsHan(qRaw);
SearchResponse<SearchDocument> resp = client.search(
b ->
b
.index(searchIndices())
.trackTotalHits(t -> t.enabled(true))
.query(qb ->
qb.bool(bool -> {
// ---------- Strict layer ----------
// Chinese / arbitrary phrases (tolerant of minor punctuation/whitespace noise)
bool.should(s ->
s.matchPhrase(mp -> mp.field("title").query(qRaw).slop(2).boost(6.0f))
);
bool.should(s ->
s.matchPhrase(mp -> mp.field("content").query(qRaw).slop(2).boost(2.5f))
);
// Structured exact matches (.raw)
bool.should(s ->
s.term(t ->
t
.field("author.raw")
.value(v -> v.stringValue(qRaw))
.boost(4.0f)
)
);
bool.should(s ->
s.term(t ->
t
.field("category.raw")
.value(v -> v.stringValue(qRaw))
.boost(3.0f)
)
);
bool.should(s ->
s.term(t ->
t
.field("tags.raw")
.value(v -> v.stringValue(qRaw))
.boost(3.0f)
)
);
// Pinyin phrases (strict)
bool.should(s ->
s.matchPhrase(mp -> mp.field("title.py").query(qRaw).slop(1).boost(4.0f))
);
bool.should(s ->
s.matchPhrase(mp -> mp.field("content.py").query(qRaw).slop(1).boost(1.8f))
);
bool.should(s ->
s.matchPhrase(mp -> mp.field("author.py").query(qRaw).slop(1).boost(2.2f))
);
bool.should(s ->
s.matchPhrase(mp -> mp.field("category.py").query(qRaw).slop(1).boost(2.0f))
);
bool.should(s ->
s.matchPhrase(mp -> mp.field("tags.py").query(qRaw).slop(1).boost(2.0f))
);
// ---------- Relaxed layer (enabled only when the query contains Chinese) ----------
if (hasHan) {
// title.zh
bool.should(s ->
s.match(m ->
m
.field("title.zh")
.query(org.opensearch.client.opensearch._types.FieldValue.of(qRaw))
.operator(org.opensearch.client.opensearch._types.query_dsl.Operator.Or)
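// minimum_should_match ladder over the analyzed query tokens: all tokens required up to 2,
// one may be missing for 3-5 tokens, two for 6-7, three for 8 or more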
.minimumShouldMatch("2<-1 3<-1 4<-1 5<-2 6<-2 7<-3")
.boost(3.0f)
)
);
// content.zh
bool.should(s ->
s.match(m ->
m
.field("content.zh")
.query(org.opensearch.client.opensearch._types.FieldValue.of(qRaw))
.operator(org.opensearch.client.opensearch._types.query_dsl.Operator.Or)
.minimumShouldMatch("2<-1 3<-1 4<-1 5<-2 6<-2 7<-3")
.boost(1.6f)
)
);
}
return bool.minimumShouldMatch("1");
})
)
// ---------- Highlighting: allow backfill across sub-fields + matched field groups ----------
.highlight(h -> {
var hb = h
.preTags("<mark>")
.postTags("</mark>")
.requireFieldMatch(false)
.fields("title", f ->
f
.fragmentSize(highlightFragmentSize())
.numberOfFragments(1)
.matchedFields(List.of("title", "title.zh", "title.py"))
)
.fields("content", f ->
f
.fragmentSize(highlightFragmentSize())
.numberOfFragments(1)
.matchedFields(List.of("content", "content.zh", "content.py"))
)
.fields("title.zh", f -> f.fragmentSize(highlightFragmentSize()).numberOfFragments(1))
.fields("content.zh", f ->
f.fragmentSize(highlightFragmentSize()).numberOfFragments(1)
)
.fields("title.py", f -> f.fragmentSize(highlightFragmentSize()).numberOfFragments(1))
.fields("content.py", f ->
f.fragmentSize(highlightFragmentSize()).numberOfFragments(1)
)
.fields("author", f -> f.numberOfFragments(0))
.fields("author.py", f -> f.numberOfFragments(0))
.fields("category", f -> f.numberOfFragments(0))
.fields("category.py", f -> f.numberOfFragments(0))
.fields("tags", f -> f.numberOfFragments(0))
.fields("tags.py", f -> f.numberOfFragments(0));
return hb;
})
.size(DEFAULT_OPEN_SEARCH_LIMIT > 0 ? DEFAULT_OPEN_SEARCH_LIMIT : 10),
SearchDocument.class
);
return mapHits(resp.hits().hits(), qRaw);
}
/** Safe escaping for Lucene query_string (we keep * unescaped and append it ourselves) */
private static String escapeForQueryString(String s) {
if (s == null || s.isEmpty()) return "";
StringBuilder sb = new StringBuilder(s.length() * 2);
for (char c : s.toCharArray()) {
switch (c) {
case '+':
case '-':
case '=':
case '&':
case '|':
case '>':
case '<':
case '!':
case '(':
case ')':
case '{':
case '}':
case '[':
case ']':
case '^':
case '"':
case '~': /* case '*': */ /* case '?': */
case ':':
case '\\':
case '/':
sb.append('\\').append(c);
break;
default:
sb.append(c);
}
}
return sb.toString();
}
private int highlightFragmentSize() {
int configured = openSearchProperties.getHighlightFragmentSize();
if (configured > 0) {
return configured;
}
if (snippetLength > 0) {
return snippetLength;
}
return 200;
}
private List<String> searchIndices() {
return List.of(
openSearchProperties.postsIndex(),
openSearchProperties.commentsIndex(),
openSearchProperties.usersIndex(),
openSearchProperties.categoriesIndex(),
openSearchProperties.tagsIndex()
);
}
private List<SearchResult> mapHits(List<Hit<SearchDocument>> hits, String keyword) {
List<SearchResult> results = new ArrayList<>();
for (Hit<SearchDocument> hit : hits) {
SearchResult result = mapHit(hit, keyword);
if (result != null) {
results.add(result);
}
}
return results;
}
private SearchResult mapHit(Hit<SearchDocument> hit, String keyword) {
SearchDocument document = hit.source();
if (document == null || document.entityId() == null) {
return null;
}
Map<String, List<String>> highlight = hit.highlight();
String highlightedContent = firstHighlight(
highlight,
"content",
"content.py",
"content.zh",
"content.raw"
);
String highlightedTitle = firstHighlight(
highlight,
"title",
"title.py",
"title.zh",
"title.raw"
);
String highlightedAuthor = firstHighlight(highlight, "author", "author.py");
String highlightedCategory = firstHighlight(highlight, "category", "category.py");
boolean highlightTitle = highlightedTitle != null && !highlightedTitle.isBlank();
String documentType = document.type() != null ? document.type() : "";
String effectiveType = documentType;
if ("post".equals(documentType) && highlightTitle) {
effectiveType = "post_title";
}
String snippetHtml = highlightedContent != null && !highlightedContent.isBlank()
? highlightedContent
: null;
if (snippetHtml == null && highlightTitle) {
snippetHtml = highlightedTitle;
}
String snippet = snippetHtml != null && !snippetHtml.isBlank()
? cleanHighlight(snippetHtml)
: null;
boolean fromStart = "post_title".equals(effectiveType);
if (snippet == null || snippet.isBlank()) {
snippet = fallbackSnippet(document.content(), keyword, fromStart);
if (snippetHtml == null) {
snippetHtml = highlightHtml(snippet, keyword);
}
} else if (snippetHtml == null) {
snippetHtml = highlightHtml(snippet, keyword);
}
if (snippet == null) {
snippet = "";
}
String subText = null;
Long postId = null;
if ("post".equals(documentType) || "post_title".equals(effectiveType)) {
subText = document.category();
} else if ("comment".equals(documentType)) {
subText = document.author();
postId = document.postId();
}
String highlightedText = highlightTitle
? highlightedTitle
: highlightHtml(document.title(), keyword);
String highlightedSubText;
if ("comment".equals(documentType)) {
highlightedSubText = highlightedAuthor != null && !highlightedAuthor.isBlank()
? highlightedAuthor
: highlightHtml(subText, keyword);
} else if ("post".equals(documentType) || "post_title".equals(effectiveType)) {
highlightedSubText = highlightedCategory != null && !highlightedCategory.isBlank()
? highlightedCategory
: highlightHtml(subText, keyword);
} else {
highlightedSubText = highlightHtml(subText, keyword);
}
String highlightedExtra = snippetHtml != null ? snippetHtml : highlightHtml(snippet, keyword);
return new SearchResult(
effectiveType,
document.entityId(),
document.title(),
subText,
snippet,
postId,
highlightedText,
highlightedSubText,
highlightedExtra
);
}
private String firstHighlight(Map<String, List<String>> highlight, String... fields) {
if (highlight == null || fields == null) {
return null;
}
for (String field : fields) {
if (field == null) {
continue;
}
List<String> values = highlight.get(field);
if (values == null || values.isEmpty()) {
continue;
}
for (String value : values) {
if (value != null && !value.isBlank()) {
return value;
}
}
}
return null;
}
private String cleanHighlight(String value) {
if (value == null) {
return null;
}
return value.replaceAll("<[^>]+>", "");
}
private String fallbackSnippet(String content, String keyword, boolean fromStart) {
if (content == null) {
return "";
}
return extractSnippet(content, keyword, fromStart);
}
private String extractSnippet(String content, String keyword, boolean fromStart) {
if (content == null) return "";
int limit = snippetLength;
@@ -165,12 +593,45 @@ public class SearchService {
return snippet;
}
private String highlightHtml(String text, String keyword) {
if (text == null) {
return null;
}
String normalizedKeyword = keyword == null ? "" : keyword.trim();
if (normalizedKeyword.isEmpty()) {
return HtmlUtils.htmlEscape(text);
}
Pattern pattern = Pattern.compile(
Pattern.quote(normalizedKeyword),
Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE
);
Matcher matcher = pattern.matcher(text);
if (!matcher.find()) {
return HtmlUtils.htmlEscape(text);
}
matcher.reset();
StringBuilder sb = new StringBuilder();
int lastEnd = 0;
while (matcher.find()) {
sb.append(HtmlUtils.htmlEscape(text.substring(lastEnd, matcher.start())));
sb.append("<mark>");
sb.append(HtmlUtils.htmlEscape(matcher.group()));
sb.append("</mark>");
lastEnd = matcher.end();
}
sb.append(HtmlUtils.htmlEscape(text.substring(lastEnd)));
return sb.toString();
}
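// Illustrative example (assumed inputs): highlightHtml("OpenSearch & pinyin", "open")
// returns "<mark>Open</mark>Search &amp; pinyin": the match is wrapped, the rest is HTML-escaped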
public record SearchResult(
String type,
Long id,
String text,
String subText,
String extra,
Long postId
Long postId,
String highlightedText,
String highlightedSubText,
String highlightedExtra
) {}
}

View File

@@ -5,6 +5,7 @@ import com.openisle.model.Tag;
import com.openisle.model.User;
import com.openisle.repository.TagRepository;
import com.openisle.repository.UserRepository;
import com.openisle.search.SearchIndexEventPublisher;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.cache.annotation.CacheEvict;
@@ -20,6 +21,7 @@ public class TagService {
private final TagRepository tagRepository;
private final TagValidator tagValidator;
private final UserRepository userRepository;
private final SearchIndexEventPublisher searchIndexEventPublisher;
@CacheEvict(value = CachingConfig.TAG_CACHE_NAME, allEntries = true)
public Tag createTag(
@@ -43,7 +45,9 @@ public class TagService {
.orElseThrow(() -> new com.openisle.exception.NotFoundException("User not found"));
tag.setCreator(creator);
}
return tagRepository.save(tag);
Tag saved = tagRepository.save(tag);
searchIndexEventPublisher.publishTagSaved(saved);
return saved;
}
public Tag createTag(
@@ -78,12 +82,15 @@ public class TagService {
if (smallIcon != null) {
tag.setSmallIcon(smallIcon);
}
return tagRepository.save(tag);
Tag saved = tagRepository.save(tag);
searchIndexEventPublisher.publishTagSaved(saved);
return saved;
}
@CacheEvict(value = CachingConfig.TAG_CACHE_NAME, allEntries = true)
public void deleteTag(Long id) {
tagRepository.deleteById(id);
searchIndexEventPublisher.publishTagDeleted(id);
}
public Tag approveTag(Long id) {
@@ -91,7 +98,9 @@ public class TagService {
.findById(id)
.orElseThrow(() -> new IllegalArgumentException("Tag not found"));
tag.setApproved(true);
return tagRepository.save(tag);
Tag saved = tagRepository.save(tag);
searchIndexEventPublisher.publishTagSaved(saved);
return saved;
}
public List<Tag> listPendingTags() {

View File

@@ -5,6 +5,7 @@ import com.openisle.exception.FieldException;
import com.openisle.model.Role;
import com.openisle.model.User;
import com.openisle.repository.UserRepository;
import com.openisle.search.SearchIndexEventPublisher;
import com.openisle.service.AvatarGenerator;
import com.openisle.service.PasswordValidator;
import com.openisle.service.UsernameValidator;
@@ -34,6 +35,7 @@ public class UserService {
private final RedisTemplate redisTemplate;
private final EmailSender emailService;
private final SearchIndexEventPublisher searchIndexEventPublisher;
public User register(
String username,
@@ -58,7 +60,9 @@ public class UserService {
// u.setVerificationCode(genCode());
u.setRegisterReason(reason);
u.setApproved(mode == com.openisle.model.RegisterMode.DIRECT);
return userRepository.save(u);
User saved = userRepository.save(u);
searchIndexEventPublisher.publishUserSaved(saved);
return saved;
}
// ── Then look up by email ──────────────────────────────
@@ -75,7 +79,9 @@ public class UserService {
// u.setVerificationCode(genCode());
u.setRegisterReason(reason);
u.setApproved(mode == com.openisle.model.RegisterMode.DIRECT);
return userRepository.save(u);
User saved = userRepository.save(u);
searchIndexEventPublisher.publishUserSaved(saved);
return saved;
}
// ── Brand-new user ─────────────────────────────────────
@@ -89,14 +95,18 @@ public class UserService {
user.setAvatar(avatarGenerator.generate(username));
user.setRegisterReason(reason);
user.setApproved(mode == com.openisle.model.RegisterMode.DIRECT);
return userRepository.save(user);
User saved = userRepository.save(user);
searchIndexEventPublisher.publishUserSaved(saved);
return saved;
}
public User registerWithInvite(String username, String email, String password) {
User user = register(username, email, password, "", com.openisle.model.RegisterMode.DIRECT);
user.setVerified(true);
// user.setVerificationCode(genCode());
return userRepository.save(user);
User saved = userRepository.save(user);
searchIndexEventPublisher.publishUserSaved(saved);
return saved;
}
private String genCode() {
@@ -209,7 +219,9 @@ public class UserService {
.findByUsername(username)
.orElseThrow(() -> new com.openisle.exception.NotFoundException("User not found"));
user.setRegisterReason(reason);
return userRepository.save(user);
User saved = userRepository.save(user);
searchIndexEventPublisher.publishUserSaved(saved);
return saved;
}
public User updateProfile(String currentUsername, String newUsername, String introduction) {

View File

@@ -45,6 +45,18 @@ app.user.replies-limit=${USER_REPLIES_LIMIT:50}
# Length of extracted snippets for posts and search (-1 to disable truncation)
app.snippet-length=${SNIPPET_LENGTH:200}
# OpenSearch integration
app.search.enabled=${SEARCH_ENABLED:true}
app.search.host=${SEARCH_HOST:localhost}
app.search.port=${SEARCH_PORT:9200}
app.search.scheme=${SEARCH_SCHEME:http}
app.search.username=${SEARCH_USERNAME:}
app.search.password=${SEARCH_PASSWORD:}
app.search.index-prefix=${SEARCH_INDEX_PREFIX:openisle}
app.search.highlight-fragment-size=${SEARCH_HIGHLIGHT_FRAGMENT_SIZE:${SNIPPET_LENGTH:200}}
app.search.reindex-on-startup=${SEARCH_REINDEX_ON_STARTUP:true}
app.search.reindex-batch-size=${SEARCH_REINDEX_BATCH_SIZE:500}
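
With these defaults the integration is effectively on out of the box: app.search.enabled and app.search.reindex-on-startup both default to true, and every app.search.* entry binds to OpenSearchProperties through its @ConfigurationProperties(prefix = "app.search") declaration, so SEARCH_HOST, SEARCH_PORT and SEARCH_SCHEME only need to be overridden when the cluster is not the local http://localhost:9200 assumed here.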
# Captcha configuration
app.captcha.enabled=${CAPTCHA_ENABLED:false}
recaptcha.secret-key=${RECAPTCHA_SECRET_KEY:}

View File

@@ -68,9 +68,9 @@ class SearchControllerTest {
c.setContent("nice");
Mockito.when(searchService.globalSearch("n")).thenReturn(
List.of(
new SearchService.SearchResult("user", 1L, "bob", null, null, null),
new SearchService.SearchResult("post", 2L, "hello", null, null, null),
new SearchService.SearchResult("comment", 3L, "nice", null, null, null)
new SearchService.SearchResult("user", 1L, "bob", null, null, null, null, null, null),
new SearchService.SearchResult("post", 2L, "hello", null, null, null, null, null, null),
new SearchService.SearchResult("comment", 3L, "nice", null, null, null, null, null, null)
)
);
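
The three extra null arguments show that SearchService.SearchResult grew from six to nine fields. Matching the arity against what the search dropdowns read from the API later in this diff (highlightedText, highlightedSubText, highlightedExtra), the nested type plausibly looks like the record below; everything past the first three components is an inference, not taken from the actual source:

// Hypothetical reconstruction of the nested SearchService.SearchResult type.
public record SearchResult(
    String type,               // "user", "post" or "comment"
    Long id,
    String text,
    String subText,
    String extra,
    Long postId,               // lets comment hits link back to their post
    String highlightedText,    // server-rendered <mark> fragment, may be null
    String highlightedSubText,
    String highlightedExtra) {}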

View File

@@ -11,6 +11,7 @@ import com.openisle.repository.PointHistoryRepository;
import com.openisle.repository.PostRepository;
import com.openisle.repository.ReactionRepository;
import com.openisle.repository.UserRepository;
import com.openisle.search.SearchIndexEventPublisher;
import com.openisle.service.PointService;
import org.junit.jupiter.api.Test;
@@ -29,6 +30,7 @@ class CommentServiceTest {
PointHistoryRepository pointHistoryRepo = mock(PointHistoryRepository.class);
PointService pointService = mock(PointService.class);
ImageUploader imageUploader = mock(ImageUploader.class);
SearchIndexEventPublisher searchIndexEventPublisher = mock(SearchIndexEventPublisher.class);
CommentService service = new CommentService(
commentRepo,
@@ -41,7 +43,8 @@ class CommentServiceTest {
nRepo,
pointHistoryRepo,
pointService,
imageUploader
imageUploader,
searchIndexEventPublisher
);
when(commentRepo.countByAuthorAfter(eq("alice"), any())).thenReturn(3L);

View File

@@ -6,6 +6,7 @@ import static org.mockito.Mockito.*;
import com.openisle.exception.RateLimitException;
import com.openisle.model.*;
import com.openisle.repository.*;
import com.openisle.search.SearchIndexEventPublisher;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
@@ -42,6 +43,7 @@ class PostServiceTest {
PostChangeLogService postChangeLogService = mock(PostChangeLogService.class);
PointHistoryRepository pointHistoryRepository = mock(PointHistoryRepository.class);
RedisTemplate redisTemplate = mock(RedisTemplate.class);
SearchIndexEventPublisher searchIndexEventPublisher = mock(SearchIndexEventPublisher.class);
PostService service = new PostService(
postRepo,
@@ -67,7 +69,8 @@ class PostServiceTest {
postChangeLogService,
pointHistoryRepository,
PublishMode.DIRECT,
redisTemplate
redisTemplate,
searchIndexEventPublisher
);
when(context.getBean(PostService.class)).thenReturn(service);
@@ -118,6 +121,7 @@ class PostServiceTest {
PostChangeLogService postChangeLogService = mock(PostChangeLogService.class);
PointHistoryRepository pointHistoryRepository = mock(PointHistoryRepository.class);
RedisTemplate redisTemplate = mock(RedisTemplate.class);
SearchIndexEventPublisher searchIndexEventPublisher = mock(SearchIndexEventPublisher.class);
PostService service = new PostService(
postRepo,
@@ -143,7 +147,8 @@ class PostServiceTest {
postChangeLogService,
pointHistoryRepository,
PublishMode.DIRECT,
redisTemplate
redisTemplate,
searchIndexEventPublisher
);
when(context.getBean(PostService.class)).thenReturn(service);
@@ -207,6 +212,7 @@ class PostServiceTest {
PostChangeLogService postChangeLogService = mock(PostChangeLogService.class);
PointHistoryRepository pointHistoryRepository = mock(PointHistoryRepository.class);
RedisTemplate redisTemplate = mock(RedisTemplate.class);
SearchIndexEventPublisher searchIndexEventPublisher = mock(SearchIndexEventPublisher.class);
PostService service = new PostService(
postRepo,
@@ -232,7 +238,8 @@ class PostServiceTest {
postChangeLogService,
pointHistoryRepository,
PublishMode.DIRECT,
redisTemplate
redisTemplate,
searchIndexEventPublisher
);
when(context.getBean(PostService.class)).thenReturn(service);
@@ -283,6 +290,7 @@ class PostServiceTest {
PostChangeLogService postChangeLogService = mock(PostChangeLogService.class);
PointHistoryRepository pointHistoryRepository = mock(PointHistoryRepository.class);
RedisTemplate redisTemplate = mock(RedisTemplate.class);
SearchIndexEventPublisher searchIndexEventPublisher = mock(SearchIndexEventPublisher.class);
PostService service = new PostService(
postRepo,
@@ -308,7 +316,8 @@ class PostServiceTest {
postChangeLogService,
pointHistoryRepository,
PublishMode.DIRECT,
redisTemplate
redisTemplate,
searchIndexEventPublisher
);
when(context.getBean(PostService.class)).thenReturn(service);
@@ -375,6 +384,7 @@ class PostServiceTest {
PostChangeLogService postChangeLogService = mock(PostChangeLogService.class);
PointHistoryRepository pointHistoryRepository = mock(PointHistoryRepository.class);
RedisTemplate redisTemplate = mock(RedisTemplate.class);
SearchIndexEventPublisher searchIndexEventPublisher = mock(SearchIndexEventPublisher.class);
PostService service = new PostService(
postRepo,
@@ -400,7 +410,8 @@ class PostServiceTest {
postChangeLogService,
pointHistoryRepository,
PublishMode.DIRECT,
redisTemplate
redisTemplate,
searchIndexEventPublisher
);
when(context.getBean(PostService.class)).thenReturn(service);

View File

@@ -9,7 +9,9 @@ import com.openisle.repository.CommentRepository;
import com.openisle.repository.PostRepository;
import com.openisle.repository.TagRepository;
import com.openisle.repository.UserRepository;
import com.openisle.search.OpenSearchProperties;
import java.util.List;
import java.util.Optional;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
@@ -27,7 +29,9 @@ class SearchServiceTest {
postRepo,
commentRepo,
categoryRepo,
tagRepo
tagRepo,
Optional.empty(),
new OpenSearchProperties()
);
Post post1 = new Post();
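
The two new constructor arguments suggest SearchService now receives an optional OpenSearch client plus the properties holder, keeping the existing repository-backed search as the fallback when no client is configured. A rough sketch of the assumed wiring (parameter names and the OpenSearchClient type are guesses; only Optional.empty() and new OpenSearchProperties() appear in the test):

package com.openisle.service;

import com.openisle.repository.*;
import com.openisle.search.OpenSearchProperties;
import java.util.Optional;
import org.opensearch.client.opensearch.OpenSearchClient;

// Assumed constructor shape matching the test wiring above; repository
// assignments are unchanged by this PR and omitted here.
public class SearchService {

  private final Optional<OpenSearchClient> openSearchClient; // empty => DB search
  private final OpenSearchProperties properties;

  public SearchService(
      UserRepository userRepository,
      PostRepository postRepository,
      CommentRepository commentRepository,
      CategoryRepository categoryRepository,
      TagRepository tagRepository,
      Optional<OpenSearchClient> openSearchClient,
      OpenSearchProperties properties) {
    this.openSearchClient = openSearchClient;
    this.properties = properties;
  }
}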

View File

@@ -1,6 +1,11 @@
# 前端访问端口
SERVER_PORT=8080
# OpenSearch 配置
OPENSEARCH_PORT=9200
OPENSEARCH_METRICS_PORT=9600
OPENSEARCH_DASHBOARDS_PORT=5601
# MySQL 配置
MYSQL_ROOT_PASSWORD=toor

docker/DockerFile (new file, 9 lines added)
View File

@@ -0,0 +1,9 @@
# opensearch
FROM opensearchproject/opensearch:3.0.0
RUN /usr/share/opensearch/bin/opensearch-plugin install -b analysis-icu
RUN /usr/share/opensearch/bin/opensearch-plugin install -b \
https://github.com/aparo/opensearch-analysis-pinyin/releases/download/3.0.0/opensearch-analysis-pinyin.zip
# ...
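
This custom image bakes in the ICU and pinyin analysis plugins so indexes can analyze Chinese text by its pinyin. As an illustration of how such an index could be created through the low-level RestClient declared in the pom, here is a sketch; the settings JSON, analyzer names, and option values such as keep_first_letter are taken from the upstream analysis-pinyin plugin's documented options and are assumptions, not the project's actual index template:

import java.io.IOException;
import org.opensearch.client.Request;
import org.opensearch.client.RestClient;

// Illustrative only: defines a "pinyin_analyzer" built on the pinyin tokenizer
// installed by the Dockerfile above, and applies it to a text field.
public final class PinyinIndexExample {

  public static void createIndex(RestClient restClient, String indexName) throws IOException {
    Request request = new Request("PUT", "/" + indexName);
    request.setJsonEntity("""
        {
          "settings": {
            "analysis": {
              "analyzer": {
                "pinyin_analyzer": { "tokenizer": "my_pinyin" }
              },
              "tokenizer": {
                "my_pinyin": {
                  "type": "pinyin",
                  "keep_first_letter": false,
                  "keep_full_pinyin": true,
                  "keep_original": true
                }
              }
            }
          },
          "mappings": {
            "properties": {
              "title": { "type": "text", "analyzer": "pinyin_analyzer" }
            }
          }
        }
        """);
    restClient.performRequest(request);
  }

  private PinyinIndexExample() {}
}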

View File

@@ -14,6 +14,44 @@ services:
- ../backend/src/main/resources/db/init:/docker-entrypoint-initdb.d
networks:
- openisle-network
# OpenSearch Service
opensearch:
build:
context: .
dockerfile: Dockerfile
container_name: opensearch
environment:
- cluster.name=os-single
- node.name=os-node-1
- discovery.type=single-node
- bootstrap.memory_lock=true
- OPENSEARCH_JAVA_OPTS=-Xms1g -Xmx1g
- DISABLE_SECURITY_PLUGIN=true
- cluster.blocks.create_index=false
ulimits:
memlock: { soft: -1, hard: -1 }
nofile: { soft: 65536, hard: 65536 }
volumes:
- ./data:/usr/share/opensearch/data
- ./snapshots:/snapshots
ports:
- "${OPENSEARCH_PORT:-9200}:9200"
- "${OPENSEARCH_METRICS_PORT:-9600}:9600"
restart: unless-stopped
dashboards:
image: opensearchproject/opensearch-dashboards:3.0.0
container_name: os-dashboards
environment:
- OPENSEARCH_HOSTS=["http://opensearch:9200"]
- DISABLE_SECURITY_DASHBOARDS_PLUGIN=true
ports:
- "${OPENSEARCH_DASHBOARDS_PORT:-5601}:5601"
depends_on:
- opensearch
restart: unless-stopped
# Java spring boot service
springboot:

View File

@@ -119,7 +119,7 @@ export default {
.cropper-btn {
padding: 6px 12px;
border-radius: 4px;
border-radius: 10px;
color: var(--primary-color);
border: none;
background: transparent;
@@ -128,7 +128,7 @@ export default {
.cropper-btn.primary {
background: var(--primary-color);
color: var(--text-color);
color: #ffff;
border-color: var(--primary-color);
}

View File

@@ -15,6 +15,7 @@
<div class="common-info-content-header">
<div class="info-content-header-left">
<span class="user-name">{{ comment.userName }}</span>
<span v-if="isCommentFromPostAuthor" class="op-badge" title="楼主">OP</span>
<medal-one class="medal-icon" />
<NuxtLink
v-if="comment.medal"
@@ -157,6 +158,12 @@ const lightboxImgs = ref([])
const loggedIn = computed(() => authState.loggedIn)
const countReplies = (list) => list.reduce((sum, r) => sum + 1 + countReplies(r.reply || []), 0)
const replyCount = computed(() => countReplies(props.comment.reply || []))
const isCommentFromPostAuthor = computed(() => {
if (props.comment.userId == null || props.postAuthorId == null) {
return false
}
return String(props.comment.userId) === String(props.postAuthorId)
})
const toggleReplies = () => {
showReplies.value = !showReplies.value
@@ -426,6 +433,21 @@ const handleContentClick = (e) => {
color: var(--text-color);
}
.op-badge {
display: inline-flex;
align-items: center;
justify-content: center;
margin-left: 6px;
padding: 0 6px;
height: 18px;
border-radius: 9px;
background-color: rgba(242, 100, 25, 0.12);
color: #f26419;
font-size: 12px;
font-weight: 600;
line-height: 1;
}
.medal-icon {
font-size: 12px;
opacity: 0.6;

View File

@@ -297,6 +297,7 @@ export default {
border: 1px solid var(--normal-border-color);
border-radius: 5px;
padding: 5px 10px;
margin-bottom: 4px;
cursor: pointer;
display: flex;
justify-content: space-between;
@@ -315,8 +316,9 @@ export default {
right: 0;
background: var(--background-color);
border: 1px solid var(--normal-border-color);
border-radius: 5px;
z-index: 10000;
max-height: 200px;
max-height: 300px;
min-width: 350px;
overflow-y: auto;
}

View File

@@ -26,9 +26,20 @@
<div class="search-option-item">
<component :is="iconMap[option.type]" class="result-icon" />
<div class="result-body">
<div class="result-main" v-html="highlight(option.text)"></div>
<div v-if="option.subText" class="result-sub" v-html="highlight(option.subText)"></div>
<div v-if="option.extra" class="result-extra" v-html="highlight(option.extra)"></div>
<div
class="result-main"
v-html="renderHighlight(option.highlightedText, option.text)"
></div>
<div
v-if="option.subText"
class="result-sub"
v-html="renderHighlight(option.highlightedSubText, option.subText)"
></div>
<div
v-if="option.extra"
class="result-extra"
v-html="renderHighlight(option.highlightedExtra, option.extra)"
></div>
</div>
</div>
</template>
@@ -70,16 +81,30 @@ const fetchResults = async (kw) => {
subText: r.subText,
extra: r.extra,
postId: r.postId,
highlightedText: r.highlightedText,
highlightedSubText: r.highlightedSubText,
highlightedExtra: r.highlightedExtra,
}))
return results.value
}
const highlight = (text) => {
text = stripMarkdown(text)
if (!keyword.value) return text
const reg = new RegExp(keyword.value, 'gi')
const res = text.replace(reg, (m) => `<span class="highlight">${m}</span>`)
return res
const escapeHtml = (value = '') =>
String(value)
.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&#39;')
const renderHighlight = (highlighted, fallback) => {
if (highlighted) {
return highlighted
}
const plain = stripMarkdown(fallback || '')
if (!plain) {
return ''
}
return escapeHtml(plain)
}
const iconMap = {
@@ -168,7 +193,7 @@ defineExpose({
padding: 10px 20px;
}
:deep(.highlight) {
:deep(mark) {
color: var(--primary-color);
}

View File

@@ -32,11 +32,14 @@
:disable-link="true"
/>
<div class="result-body">
<div class="result-main" v-html="highlight(option.username)"></div>
<div
class="result-main"
v-html="renderHighlight(option.highlightedUsername, option.username)"
></div>
<div
v-if="option.introduction"
class="result-sub"
v-html="highlight(option.introduction)"
v-html="renderHighlight(option.highlightedIntroduction, option.introduction)"
></div>
</div>
</div>
@@ -79,15 +82,29 @@ const fetchResults = async (kw) => {
username: u.username,
avatar: u.avatar,
introduction: u.introduction,
highlightedUsername: u.highlightedText,
highlightedIntroduction: u.highlightedSubText || u.highlightedExtra,
}))
return results.value
}
const highlight = (text) => {
text = stripMarkdown(text || '')
if (!keyword.value) return text
const reg = new RegExp(keyword.value, 'gi')
return text.replace(reg, (m) => `<span class="highlight">${m}</span>`)
const escapeHtml = (value = '') =>
String(value)
.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&#39;')
const renderHighlight = (highlighted, fallback) => {
if (highlighted) {
return highlighted
}
const plain = stripMarkdown(fallback || '')
if (!plain) {
return ''
}
return escapeHtml(plain)
}
watch(selected, async (val) => {
@@ -170,7 +187,7 @@ defineExpose({
padding: 10px 20px;
}
:deep(.highlight) {
:deep(mark) {
color: var(--primary-color);
}

View File

@@ -71,6 +71,16 @@ export default {
label: '隐私政策',
file: 'https://openisle-1307107697.cos.ap-guangzhou.myqcloud.com/assert/about/privacy.md',
},
{
key: 'points',
label: '积分说明',
content: `# 积分说明
- 积分可用于兑换商品、参与抽奖等社区玩法。
- 管理员可以通过后台新增的积分模块为用户发放奖励积分。
- 每次发放都会记录在积分历史中,方便你查看积分来源。
`,
},
{
key: 'api',
label: 'API与调试',
@@ -88,11 +98,21 @@ export default {
return `${token.value.slice(0, 20)}...${token.value.slice(-10)}`
})
const loadContent = async (file) => {
if (!file) return
const loadContent = async (tab) => {
if (!tab || tab.key === 'api') return
if (tab.content) {
isFetching.value = false
content.value = tab.content
return
}
if (!tab.file) {
isFetching.value = false
content.value = ''
return
}
try {
isFetching.value = true
const res = await fetch(file)
const res = await fetch(tab.file)
if (res.ok) {
content.value = await res.text()
} else {
@@ -110,15 +130,15 @@ export default {
if (initTab && tabs.find((t) => t.key === initTab)) {
selectedTab.value = initTab
const tab = tabs.find((t) => t.key === initTab)
if (tab && tab.file) loadContent(tab.file)
if (tab) loadContent(tab)
} else {
loadContent(tabs[0].file)
loadContent(tabs[0])
}
})
watch(selectedTab, (name) => {
const tab = tabs.find((t) => t.key === name)
if (tab && tab.file) loadContent(tab.file)
if (tab) loadContent(tab)
router.replace({ query: { ...route.query, tab: name } })
})
@@ -127,6 +147,8 @@ export default {
(name) => {
if (name && name !== selectedTab.value && tabs.find((t) => t.key === name)) {
selectedTab.value = name
const tab = tabs.find((t) => t.key === name)
if (tab) loadContent(tab)
}
},
)

View File

@@ -85,7 +85,7 @@
</div>
<div class="article-member-avatars-container">
<div v-for="member in article.members">
<div v-for="member in article.members" class="article-member-avatar-item">
<BaseUserAvatar
class="article-member-avatar-item-img"
:src="member.avatar"

View File

@@ -184,6 +184,16 @@
}}</NuxtLink>
参与获得 {{ item.amount }} 积分
</template>
<template v-else-if="item.type === 'ADMIN_GRANT' && item.fromUserId">
管理员
<NuxtLink :to="`/users/${item.fromUserId}`" class="timeline-link">{{
item.fromUserName
}}</NuxtLink>
赠送了 {{ item.amount }} 积分
</template>
<template v-else-if="item.type === 'ADMIN_GRANT'">
管理员赠送了 {{ item.amount }} 积分
</template>
<template v-else-if="item.type === 'SYSTEM_ONLINE'"> 积分历史系统上线 </template>
<paper-money-two /> 你目前的积分是 {{ item.balance }}
</div>
@@ -229,6 +239,7 @@ const pointRules = [
'评论被点赞:每次 10 积分',
'邀请好友加入可获得 500 积分/次,注意需要使用邀请链接注册',
'文章被收录至精选:每次 500 积分',
'管理员赠送:特殊活动可由管理员手动赠送积分',
]
const goods = ref([])
@@ -250,6 +261,7 @@ const iconMap = {
LOTTERY_REWARD: 'fireworks',
POST_LIKE_CANCELLED: 'clear-icon',
COMMENT_LIKE_CANCELLED: 'clear-icon',
ADMIN_GRANT: 'paper-money-two',
}
const loadTrend = async () => {

View File

@@ -65,6 +65,35 @@
<div class="setting-title">注册模式</div>
<Dropdown v-model="registerMode" :fetch-options="fetchRegisterModes" />
</div>
<div class="form-row grant-row">
<div class="setting-title">发放积分</div>
<div class="grant-form">
<BaseInput
v-model="grantUsername"
placeholder="请输入用户名"
class="grant-input"
@input="grantError = ''"
/>
<BaseInput
v-model="grantAmount"
type="number"
placeholder="积分数量"
class="grant-input amount"
@input="grantError = ''"
/>
<button
type="button"
class="grant-button"
:class="{ disabled: isGrantingPoints }"
:disabled="isGrantingPoints"
@click="grantPoint"
>
{{ isGrantingPoints ? '发放中...' : '发放' }}
</button>
</div>
<div v-if="grantError" class="grant-error-message">{{ grantError }}</div>
<div class="setting-description">积分会立即发放给目标用户并记录在积分历史中</div>
</div>
</div>
<div class="buttons">
<div v-if="isSaving" class="save-button disabled">保存中...</div>
@@ -102,6 +131,10 @@ const registerMode = ref('DIRECT')
const isLoadingPage = ref(false)
const isSaving = ref(false)
const frosted = ref(true)
const grantUsername = ref('')
const grantAmount = ref('')
const grantError = ref('')
const isGrantingPoints = ref(false)
onMounted(async () => {
isLoadingPage.value = true
@@ -184,6 +217,55 @@ const loadAdminConfig = async () => {
// ignore
}
}
const grantPoint = async () => {
if (isGrantingPoints.value) return
const username = grantUsername.value.trim()
if (!username) {
grantError.value = '用户名不能为空'
toast.error(grantError.value)
return
}
const amount = Number(grantAmount.value)
if (!Number.isInteger(amount) || amount <= 0) {
grantError.value = '积分数量必须为正整数'
toast.error(grantError.value)
return
}
isGrantingPoints.value = true
try {
const token = getToken()
const res = await fetch(`${API_BASE_URL}/api/admin/points/grant`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`,
},
body: JSON.stringify({ username, amount }),
})
let data = null
try {
data = await res.json()
} catch (e) {
// ignore body parse errors
}
if (res.ok) {
toast.success(`已为 ${username} 发放 ${amount} 积分`)
grantUsername.value = ''
grantAmount.value = ''
grantError.value = ''
} else {
const message = data?.error || '发放失败'
grantError.value = message
toast.error(message)
}
} catch (e) {
grantError.value = '发放失败,请稍后再试'
toast.error(grantError.value)
} finally {
isGrantingPoints.value = false
}
}
const save = async () => {
isSaving.value = true
@@ -323,6 +405,51 @@ const save = async () => {
max-width: 200px;
}
.grant-row {
max-width: 100%;
}
.grant-form {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
}
.grant-input {
flex: 1 1 180px;
}
.grant-input.amount {
max-width: 140px;
}
.grant-button {
background-color: var(--primary-color);
color: #fff;
border: none;
border-radius: 8px;
padding: 8px 16px;
cursor: pointer;
transition: background-color 0.2s ease-in-out;
}
.grant-button.disabled,
.grant-button:disabled {
cursor: not-allowed;
background-color: var(--primary-color-disabled);
}
.grant-button:not(.disabled):hover {
background-color: var(--primary-color-hover);
}
.grant-error-message {
color: red;
font-size: 14px;
margin-top: 8px;
}
.switch-row {
flex-direction: row;
align-items: center;