Spring Boot REST service: set Kafka topic properties at runtime inside the controller
I am new to Spring Boot and Kafka. I have a simple REST service that, when called, consumes messages from a Kafka topic. There is a custom configuration class and a controller. I would like to know how to set properties such as MAX_POLL_RECORDS_CONFIG and AUTO_OFFSET_RESET_CONFIG at runtime. As I understand it, KafkaConfig only runs at startup. How can I set these properties at runtime, i.e. based on some query parameters? Any ideas on how to do this inside the getMessagesFromKafkaTopic method? Here are my controller and KafkaConfig.
KafkaConfig.java
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
@EnableKafka
public class KafkaConfig {

    @Value("${kafka.con.server}")
    private String server;

    @Value("${kafka.con.groupid}")
    private String gid;

    @Value("${kafka.con.enablecommit}")
    private String enablecommit;

    @Value("${kafka.con.ommitinterval}")
    private String commitint;

    @Value("${kafka.con.sessiontimeout}")
    private String timeout;

    @Value("${kafka.con.maxrecordspoll}")
    private String maxPollRecords;

    @Value("${kafka.con.offsetReset}")
    private String offsetReset;

    /**
     * ConsumerFactory
     * @return
     */
    @Bean
    public ConsumerFactory<Object, Object> consumerFactory() {
        Map<String, Object> configs = new HashMap<String, Object>();
        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        configs.put(ConsumerConfig.GROUP_ID_CONFIG, gid);
        configs.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enablecommit);
        configs.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, commitint);
        configs.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, timeout);
        configs.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
        configs.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, offsetReset);
        configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<Object, Object>(configs);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Object> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Object> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}
KafkaConsumerController.java
import java.text.Format;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.Collections;
import java.util.Date;
import java.util.Properties;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping(value = "/kafka")
public class KafkaConsumerController {

    @Autowired
    private ConsumerFactory<Object, Object> consumerFactory;

    @Autowired
    private ConcurrentKafkaListenerContainerFactory concurrentKafkaListenerContainerFactory;

    @RequestMapping(value = "/consume", method = RequestMethod.GET)
    public void getMessagesFromKafkaTopic(@RequestParam("batchsize") int batchsize) {
        System.out.println("********* batchsize " + batchsize);
        // here I would like to set MAX_POLL_RECORDS_CONFIG to the value coming in as a param
        Consumer<Object, Object> con = null;
        try {
            con = consumerFactory.createConsumer();
            con.subscribe(Collections.singleton("demotopic"));
            int count = 0;
            while (true) {
                ConsumerRecords<Object, Object> records = con.poll(Duration.ofSeconds(3));
                System.out.println("****** Record Count ******* : " + records.count());
                if (records.count() == 0) {
                    count++;
                    if (count > 3) {
                        break;
                    } else {
                        continue;
                    }
                }
                for (ConsumerRecord<Object, Object> record : records) {
                    System.out.println("Message: " + record.value());
                    System.out.println("Message offset: " + record.offset());
                    System.out.println("Message timestamp: " + record.timestamp());
                    Date date = new Date(record.timestamp());
                    Format format = new SimpleDateFormat("yyyy MM dd HH:mm:ss.SSS");
                    System.out.println("Message date: " + format.format(date));
                }
                con.commitSync();
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (con != null) {
                con.close();
            }
        }
        System.out.println("********* END **********");
    }
}
application.properties
kafka.con.server=localhost:9092
kafka.con.groupid=my-first-consumer-group
kafka.con.enablecommit=false
kafka.con.ommitinterval=1000
kafka.con.sessiontimeout=30000
kafka.con.maxrecordspoll=5
kafka.con.offsetReset=earliest
Instead of con = consumerFactory.createConsumer(); use:
/**
* Create a consumer with an explicit group id; in addition, the
* client id suffix is appended to the clientIdPrefix which overrides the
* {@code client.id} property, if present. In addition, consumer properties can
* be overridden if the factory implementation supports it.
* @param groupId the group id.
* @param clientIdPrefix the prefix.
* @param clientIdSuffix the suffix.
* @param properties the properties to override.
* @return the consumer.
* @since 2.2.4
*/
Consumer<K, V> createConsumer(@Nullable String groupId, @Nullable String clientIdPrefix,
@Nullable String clientIdSuffix, @Nullable Properties properties);
This allows you to override consumer properties via the properties argument.
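For example, inside getMessagesFromKafkaTopic the batchsize query parameter can be passed through as a max.poll.records override. The following is only a minimal sketch, assuming Spring Kafka 2.2.4 or later, the DefaultKafkaConsumerFactory bean and the "demotopic" topic shown above; the null arguments keep the group id and client id already configured in the factory:

// Sketch: replace con = consumerFactory.createConsumer(); with a call that passes
// per-request overrides (property keys taken from ConsumerConfig).
Properties overrides = new Properties();
overrides.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, String.valueOf(batchsize));
// AUTO_OFFSET_RESET_CONFIG could be overridden the same way,
// e.g. from an additional request parameter.

Consumer<Object, Object> con = null;
try {
    // null groupId / clientIdPrefix / clientIdSuffix keep the values from the factory config
    con = consumerFactory.createConsumer(null, null, null, overrides);
    con.subscribe(Collections.singleton("demotopic"));
    ConsumerRecords<Object, Object> records = con.poll(Duration.ofSeconds(3));
    System.out.println("Record count with max.poll.records=" + batchsize + ": " + records.count());
} finally {
    if (con != null) {
        con.close();
    }
}

The override only applies to the Consumer created by that call; the factory defaults (and any listener containers built from it) are unchanged. Also note that auto.offset.reset only takes effect when the group has no committed offset for the partition.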