集群同步 -> 同步消费位点

This commit is contained in:
许晓东
2021-10-25 00:10:08 +08:00
parent 66e7ea0676
commit 5ccf9013e5
13 changed files with 233 additions and 6 deletions

View File

@@ -0,0 +1,22 @@
package com.xuxd.kafka.console.beans.dto;

import java.util.Properties;
import lombok.Data;

/**
 * kafka-console-ui.
 *
 * Request body for cross-cluster sync operations (see OperationController):
 * identifies the remote cluster, the consumer group and the topic to sync,
 * plus extra Kafka client properties.
 *
 * @author xuxd
 * @date 2021-10-24 23:10:47
 **/
@Data
public class SyncDataDTO {

    // bootstrap address of the remote ("that") cluster; the controller copies
    // it into {@code properties} as bootstrap.servers
    private String address;

    // consumer group whose offsets are being synced
    private String groupId;

    // topic whose committed offsets are being synced
    private String topic;

    // additional Kafka client properties (e.g. request.timeout.ms); never null
    private Properties properties = new Properties();
}

View File

@@ -5,6 +5,7 @@ import kafka.console.ConfigConsole;
import kafka.console.ConsumerConsole;
import kafka.console.KafkaAclConsole;
import kafka.console.KafkaConfigConsole;
import kafka.console.OperationConsole;
import kafka.console.TopicConsole;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -47,4 +48,10 @@ public class KafkaConfiguration {
// Bean factory for the config console; simple constructor wiring.
public ConfigConsole configConsole(KafkaConfig config) {
    return new ConfigConsole(config);
}
// Bean factory for the operation console used by OperationService
// (cross-cluster consumer-offset sync); delegates to topic/consumer consoles.
@Bean
public OperationConsole operationConsole(KafkaConfig config, TopicConsole topicConsole,
    ConsumerConsole consumerConsole) {
    return new OperationConsole(config, topicConsole, consumerConsole);
}
}

View File

@@ -111,4 +111,9 @@ public class ConsumerController {
// Return all consumer group ids; delegates straight to the service layer.
public Object getGroupIdList() {
    return consumerService.getGroupIdList();
}
// List the topics the given consumer group subscribes
// (derived from its committed offsets — see ConsumerServiceImpl).
@GetMapping("/topic/list")
public Object getSubscribeTopicList(@RequestParam String groupId) {
    return consumerService.getSubscribeTopicList(groupId);
}
}

View File

@@ -0,0 +1,30 @@
package com.xuxd.kafka.console.controller;

import com.xuxd.kafka.console.beans.dto.SyncDataDTO;
import com.xuxd.kafka.console.service.OperationService;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * kafka-console-ui.
 *
 * REST endpoints for cluster operation tasks, mounted under {@code /op}.
 *
 * @author xuxd
 * @date 2021-10-24 23:13:28
 **/
@RestController
@RequestMapping("/op")
public class OperationController {

    @Autowired
    private OperationService operationService;

    /**
     * Sync consumer group offsets from a remote cluster.
     * The remote address from the request body is injected into the client
     * properties as {@code bootstrap.servers} before delegating to the service.
     */
    @PostMapping("/sync/consumer/offset")
    public Object syncConsumerOffset(@RequestBody SyncDataDTO dto) {
        dto.getProperties().put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, dto.getAddress());
        return operationService.syncConsumerOffset(dto.getGroupId(), dto.getTopic(), dto.getProperties());
    }
}

View File

@@ -30,4 +30,6 @@ public interface ConsumerService {
// Reset one partition of the consumer group to an absolute target offset.
ResponseData resetPartitionToTargetOffset(String groupId, TopicPartition partition, long offset);

// List all known consumer group ids.
ResponseData getGroupIdList();

// List the topics the group has committed offsets for
// (backed by ConsumerConsole.listSubscribeTopics).
ResponseData getSubscribeTopicList(String groupId);
}

View File

@@ -0,0 +1,15 @@
package com.xuxd.kafka.console.service;

import com.xuxd.kafka.console.beans.ResponseData;
import java.util.Properties;

/**
 * kafka-console-ui.
 *
 * Cluster operation tasks, currently cross-cluster consumer-offset sync.
 *
 * @author xuxd
 * @date 2021-10-24 23:12:43
 **/
public interface OperationService {

    /**
     * Sync the group's committed offsets for {@code topic} with a remote
     * cluster described by {@code thatProps} (must contain bootstrap.servers).
     * Returns success, or failure with the error message from the console layer.
     */
    ResponseData syncConsumerOffset(String groupId, String topic, Properties thatProps);
}

View File

@@ -138,4 +138,8 @@ public class ConsumerServiceImpl implements ConsumerService {
Set<String> stateGroup = consumerConsole.getConsumerGroupIdList(null);
return ResponseData.create().data(stateGroup).success();
}
// The subscribed topics are the keys of the topic -> partitions mapping
// produced by the console from the group's committed offsets.
@Override
public ResponseData getSubscribeTopicList(String groupId) {
    Set<String> topics = consumerConsole.listSubscribeTopics(groupId).keySet();
    return ResponseData.create().data(topics).success();
}
}

View File

@@ -0,0 +1,29 @@
package com.xuxd.kafka.console.service.impl;

import com.xuxd.kafka.console.beans.ResponseData;
import com.xuxd.kafka.console.service.OperationService;
import java.util.Properties;
import kafka.console.OperationConsole;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import scala.Tuple2;

/**
 * kafka-console-ui.
 *
 * Delegates operation requests to the Scala console layer and adapts its
 * (success, message) tuple into a ResponseData.
 *
 * @author xuxd
 * @date 2021-10-24 23:12:54
 **/
@Service
public class OperationServiceImpl implements OperationService {

    @Autowired
    private OperationConsole operationConsole;

    @Override
    public ResponseData syncConsumerOffset(String groupId, String topic, Properties thatProps) {
        Tuple2<Object, String> result = operationConsole.syncConsumerOffset(groupId, topic, thatProps);
        boolean succeeded = (boolean) result._1();
        if (succeeded) {
            return ResponseData.create().success();
        }
        // surface the console's error message to the caller
        return ResponseData.create().failed(result._2());
    }
}

View File

@@ -173,6 +173,18 @@ class ConsumerConsole(config: KafkaConfig) extends KafkaConsole(config: KafkaCon
}).asInstanceOf[(Boolean, String)]
}
/**
 * Group the consumer group's committed offsets by topic.
 *
 * @param groupId consumer group id
 * @return topic name -> partitions the group has committed offsets for
 */
def listSubscribeTopics(groupId: String): util.Map[String, util.List[TopicPartition]] = {
    val commitOffs = getCommittedOffsets(groupId)
    val map: util.Map[String, util.List[TopicPartition]] = new util.HashMap[String, util.List[TopicPartition]]()
    for (t <- commitOffs.keySet) {
        // computeIfAbsent replaces the manual containsKey/put/get dance
        map.computeIfAbsent(t.topic(), _ => new util.ArrayList[TopicPartition]()).add(t)
    }
    map
}
private def describeConsumerGroups(groupIds: util.Set[String]): mutable.Map[String, ConsumerGroupDescription] = {
withAdminClientAndCatchError(admin => {
admin.describeConsumerGroups(groupIds).describedGroups().asScala.map {

View File

@@ -65,6 +65,10 @@ class KafkaConsole(config: KafkaConfig) {
}
}
// Create an Admin client for an arbitrary cluster described by the given
// properties (used by OperationConsole to reach the remote cluster).
protected def createAdminClient(props: Properties): Admin = {
    Admin.create(props)
}
// Admin client for the locally configured cluster (this console's own props).
private def createAdminClient(): Admin = {
    Admin.create(getProps())
}

View File

@@ -0,0 +1,39 @@
package kafka.console

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.xuxd.kafka.console.config.KafkaConfig

import scala.jdk.CollectionConverters.{ListHasAsScala, MapHasAsScala}

/**
 * kafka-console-ui.
 *
 * Cross-cluster operation console; currently handles consumer-offset sync.
 *
 * @author xuxd
 * @date 2021-10-24 23:23:30
 * */
class OperationConsole(config: KafkaConfig, topicConsole: TopicConsole,
    consumerConsole: ConsumerConsole) extends KafkaConsole(config: KafkaConfig) with Logging {

    /**
     * Check that the partitions the group committed for `topic` in this cluster
     * match those committed in the remote cluster reachable via `props`.
     *
     * Fixes over the original:
     *  - dropped the no-op `catch { case ex => throw ex }` (rethrow-only catch);
     *  - guard against an NPE when `listSubscribeTopics(groupId).get(topic)`
     *    returns null (group has no commits for the topic in this cluster),
     *    raising a descriptive IllegalStateException instead.
     *
     * NOTE(review): only the consistency check is implemented in this block;
     * the actual offset copy is not visible here — confirm downstream.
     */
    def syncConsumerOffset(groupId: String, topic: String, props: Properties): (Boolean, String) = {
        val thatAdmin = createAdminClient(props)
        try {
            val thisPartitionList = consumerConsole.listSubscribeTopics(groupId).get(topic)
            if (thisPartitionList == null) {
                throw new IllegalStateException("group has no committed offsets for the topic in this cluster.")
            }
            val thisTopicPartitions = thisPartitionList.asScala.sortBy(_.partition())
            val thatTopicPartitions = thatAdmin.listConsumerGroupOffsets(groupId)
                .partitionsToOffsetAndMetadata.get(timeoutMs, TimeUnit.MILLISECONDS)
                .asScala.filter(_._1.topic().equals(topic)).keySet.toList.sortBy(_.partition())
            // Scala Seq equality is element-wise, so Buffer vs List compares correctly.
            if (thatTopicPartitions != thisTopicPartitions) {
                throw new IllegalStateException("topic partition inconsistent.")
            }
        } finally {
            // always release the remote admin client, whether the check passed or threw
            thatAdmin.close()
        }
        (true, "")
    }
}

View File

@@ -112,6 +112,10 @@ export const KafkaConsumerApi = {
url: "/consumer/group/id/list",
method: "get",
},
getSubscribeTopicList: {
url: "/consumer/topic/list",
method: "get",
},
};
export const KafkaClusterApi = {
@@ -120,3 +124,10 @@ export const KafkaClusterApi = {
method: "get",
},
};
// Cluster operation endpoints (served by OperationController under "/op").
export const KafkaOpApi = {
  syncConsumerOffset: {
    url: "/op/sync/consumer/offset",
    method: "post",
  },
};

View File

@@ -19,6 +19,7 @@
>
<a-form-item label="消费组">
<a-select
@change="handleGroupChange"
show-search
option-filter-prop="children"
v-decorator="[
@@ -58,9 +59,16 @@
placeholder="输入待同步kafka地址"
/>
</a-form-item>
<a-form-item label="kafka属性">
<a-textarea
rows="5"
placeholder="除了地址其它kafka属性配置
request.timeout.ms=6000"
v-decorator="['properties']"
/>
</a-form-item>
<a-form-item :wrapper-col="{ span: 12, offset: 5 }">
<a-button type="primary" html-type="submit"> 提交 </a-button>
<a-button type="primary" html-type="submit"> 提交</a-button>
</a-form-item>
</a-form>
</a-spin>
@@ -70,8 +78,9 @@
<script>
import request from "@/utils/request";
import { KafkaConsumerApi } from "@/utils/api";
import { KafkaConsumerApi, KafkaOpApi } from "@/utils/api";
import notification from "ant-design-vue/es/notification";
export default {
name: "SyncConsumerOffset",
props: {
@@ -122,10 +131,30 @@ export default {
e.preventDefault();
this.form.validateFields((err, values) => {
if (!err) {
if (values.properties) {
const properties = {};
values.properties.split("\n").forEach((e) => {
const c = e.split("=");
c.split;
if (c.length > 1) {
let k = c[0].trim(),
v = c[1].trim();
for (let j = 2; j < c.length; j++) {
v += ("=" + c[j]);
}
if (k && v) {
properties[k] = v;
}
}
});
values.properties = properties;
} else {
values.properties = {};
}
this.loading = true;
request({
url: KafkaConsumerApi.addSubscription.url,
method: KafkaConsumerApi.addSubscription.method,
url: KafkaOpApi.syncConsumerOffset.url,
method: KafkaOpApi.syncConsumerOffset.method,
data: values,
}).then((res) => {
this.loading = false;
@@ -143,9 +172,27 @@ export default {
});
},
handleCancel() {
  // Clear cached dialog state so stale lists don't show on reopen,
  // then tell the parent to close without refreshing.
  this.data = [];
  this.groupIdList = [];
  this.topicList = [];
  this.$emit("closeSyncConsumerOffsetDialog", { refresh: false });
},
handleGroupChange(groupId) {
this.loading = true;
request({
url: KafkaConsumerApi.getSubscribeTopicList.url + "?groupId=" + groupId,
method: KafkaConsumerApi.getSubscribeTopicList.method,
}).then((res) => {
this.loading = false;
if (res.code == 0) {
this.topicList = res.data;
} else {
notification.error({
message: "error",
description: res.msg,
});
}
});
},
},
};
</script>