@@ -0,0 +1,208 @@
+package com.css.simulation.resource.util;
+
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.admin.Admin;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+
+import java.util.Arrays;
+import java.util.Collections;
+
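+/**
+ * Thin static helpers around the Apache Kafka {@link Admin}, {@link KafkaProducer}
+ * and {@link KafkaConsumer} clients.
+ * <p>
+ * A minimal producer sketch (the bootstrap address, serializers and topic name below
+ * are placeholder assumptions, not provided by this class):
+ * <pre>{@code
+ * Properties props = new Properties();
+ * props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
+ * props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+ * props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+ * try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
+ *     ApacheKafkaUtil.send(producer, "demo-topic", "hello");     // async (default)
+ *     ApacheKafkaUtil.sendSync(producer, "demo-topic", "hello"); // blocks for the ack
+ * }
+ * }</pre>
+ */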
+@Slf4j
+public class ApacheKafkaUtil {
+
+    //* -------------------------------- Admin --------------------------------
+
+    /**
+     * Create a topic.
+     *
+     * @param admin             admin client
+     * @param name              topic name
+     * @param numPartitions     number of partitions
+     * @param replicationFactor replication factor
+     */
+    public static void createTopic(Admin admin, String name, int numPartitions, short replicationFactor) {
+        NewTopic newTopic = new NewTopic(name, numPartitions, replicationFactor);
+        admin.createTopics(Collections.singleton(newTopic));
+        log.info("ApacheKafkaUtil--createTopic creating topic {}, partitions: {}, replication factor: {}", name, numPartitions, replicationFactor);
+    }
+
+    /**
+     * Delete topics.
+     *
+     * @param admin  admin client
+     * @param topics names of the topics to delete
+     */
+    public static void deleteTopic(Admin admin, String... topics) {
+        admin.deleteTopics(Arrays.asList(topics));
+        log.info("ApacheKafkaUtil.deleteTopic() deleting topics: {}", Arrays.toString(topics));
+    }
+
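+    /*
+     * A minimal admin sketch (the bootstrap address and topic name are placeholders,
+     * not part of this class):
+     *
+     *     Properties props = new Properties();
+     *     props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
+     *     try (Admin admin = Admin.create(props)) {
+     *         ApacheKafkaUtil.createTopic(admin, "demo-topic", 3, (short) 1);
+     *     }
+     */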
+
+    //* -------------------------------- Producer --------------------------------
+
+    /**
+     * Sends asynchronously by default.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param value         value
+     */
+    public static void send(KafkaProducer<String, String> kafkaProducer, String topic, String value) {
+        sendAsync(kafkaProducer, topic, value);
+    }
+
+    /**
+     * Sends asynchronously by default.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param key           key
+     * @param value         value
+     */
+    public static void send(KafkaProducer<String, String> kafkaProducer, String topic, String key, String value) {
+        sendAsync(kafkaProducer, topic, key, value);
+    }
+
+    /**
+     * Sends asynchronously by default.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param partition     partition
+     * @param key           key
+     * @param value         value
+     */
+    public static void send(KafkaProducer<String, String> kafkaProducer, String topic, int partition, String key, String value) {
+        sendAsync(kafkaProducer, topic, partition, key, value);
+    }
+
+    /**
+     * Asynchronous send: returns immediately, the result is logged in the callback.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param value         value
+     */
+    public static void sendAsync(KafkaProducer<String, String> kafkaProducer, String topic, String value) {
+        kafkaProducer.send(new ProducerRecord<>(topic, value), (recordMetadata, e) -> {
+            if (e == null) {
+                log.info("ApacheKafkaUtil--send topic: {}, partition: {}", recordMetadata.topic(), recordMetadata.partition());
+            } else {
+                log.error("ApacheKafkaUtil--send failed, topic: {}, value: {}", topic, value, e);
+            }
+        });
+    }
+
+    /**
+     * Asynchronous send: returns immediately, the result is logged in the callback.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param key           key
+     * @param value         value
+     */
+    public static void sendAsync(KafkaProducer<String, String> kafkaProducer, String topic, String key, String value) {
+        kafkaProducer.send(new ProducerRecord<>(topic, key, value), (recordMetadata, e) -> {
+            if (e == null) {
+                log.info("ApacheKafkaUtil--send topic: {}, partition: {}", recordMetadata.topic(), recordMetadata.partition());
+            } else {
+                log.error("ApacheKafkaUtil--send failed, topic: {}, value: {}", topic, value, e);
+            }
+        });
+    }
+
+    /**
+     * Asynchronous send: returns immediately, the result is logged in the callback.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param partition     partition
+     * @param key           key
+     * @param value         value
+     */
+    public static void sendAsync(KafkaProducer<String, String> kafkaProducer, String topic, int partition, String key, String value) {
+        kafkaProducer.send(new ProducerRecord<>(topic, partition, key, value), (recordMetadata, e) -> {
+            if (e == null) {
+                log.info("ApacheKafkaUtil--send topic: {}, partition: {}", recordMetadata.topic(), recordMetadata.partition());
+            } else {
+                log.error("ApacheKafkaUtil--send failed, topic: {}, value: {}", topic, value, e);
+            }
+        });
+    }
+
+    /**
+     * Synchronous send: blocks until the broker acknowledges the record.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param value         value
+     * @return record metadata of the acknowledged record
+     */
+    @SneakyThrows
+    public static RecordMetadata sendSync(KafkaProducer<String, String> kafkaProducer, String topic, String value) {
+        return kafkaProducer.send(new ProducerRecord<>(topic, value), (recordMetadata, e) -> {
+            if (e == null) {
+                log.info("ApacheKafkaUtil--send topic: {}, partition: {}", recordMetadata.topic(), recordMetadata.partition());
+            } else {
+                log.error("ApacheKafkaUtil--send failed, topic: {}, value: {}", topic, value, e);
+            }
+        }).get();
+    }
+
+    /**
+     * Synchronous send: blocks until the broker acknowledges the record.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param key           key
+     * @param value         value
+     * @return record metadata of the acknowledged record
+     */
+    @SneakyThrows
+    public static RecordMetadata sendSync(KafkaProducer<String, String> kafkaProducer, String topic, String key, String value) {
+        return kafkaProducer.send(new ProducerRecord<>(topic, key, value), (recordMetadata, e) -> {
+            if (e == null) {
+                log.info("ApacheKafkaUtil--send topic: {}, partition: {}", recordMetadata.topic(), recordMetadata.partition());
+            } else {
+                log.error("ApacheKafkaUtil--send failed, topic: {}, value: {}", topic, value, e);
+            }
+        }).get();
+    }
+
+    /**
+     * Synchronous send: blocks until the broker acknowledges the record.
+     *
+     * @param kafkaProducer producer
+     * @param topic         topic
+     * @param partition     partition
+     * @param key           key
+     * @param value         value
+     * @return record metadata of the acknowledged record
+     */
+    @SneakyThrows
+    public static RecordMetadata sendSync(KafkaProducer<String, String> kafkaProducer, String topic, int partition, String key, String value) {
+        return kafkaProducer.send(new ProducerRecord<>(topic, partition, key, value), (recordMetadata, e) -> {
+            if (e == null) {
+                log.info("ApacheKafkaUtil--send topic: {}, partition: {}", recordMetadata.topic(), recordMetadata.partition());
+            } else {
+                log.error("ApacheKafkaUtil--send failed, topic: {}, value: {}", topic, value, e);
+            }
+        }).get();
+    }
+
+    //* -------------------------------- Consumer --------------------------------
+
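+    /*
+     * A minimal consumer sketch (group id, topic and poll timeout are placeholders;
+     * assumes enable.auto.commit=false so that the commit helpers below matter):
+     *
+     *     try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
+     *         consumer.subscribe(Collections.singleton("demo-topic"));
+     *         ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
+     *         records.forEach(r -> log.info("offset={}, value={}", r.offset(), r.value()));
+     *         ApacheKafkaUtil.commitSync(consumer);
+     *     }
+     */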
+    /**
+     * Commit the consumer's current offsets asynchronously.
+     */
+    public static void commitAsync(KafkaConsumer<String, String> kafkaConsumer) {
+        kafkaConsumer.commitAsync();
+    }
+
+    /**
+     * Commit the consumer's current offsets synchronously.
+     */
+    public static void commitSync(KafkaConsumer<String, String> kafkaConsumer) {
+        kafkaConsumer.commitSync();
+    }
+}