spring 写入 customConsumerFactory 和 customKafkaListenerContainerFactory 时未自动加载 kafka 属性

spring kafka properties not auto loaded when writing customConsumerFactory and customKafkaListenerContainerFactory

我想从 application.properties 加载我的 spring-kafka 属性,并且必须通过 Spring 自动配置加载。我遇到的异常是:java.lang.IllegalStateException: No Acknowledgment available as an argument, the listener container must have a MANUAL AckMode to populate the Acknowledgment。但是我已经在属性文件中设置了 spring.kafka.listener.ack-mode=manual-immediate。问题在于我使用的是自定义的 fooKafkaListenerContainerFactory,它没有应用这个设置。我希望无需手动编码设置,该配置能直接从 application.properties 中读取。@Gary Russell 感谢您的帮助。

我的代码如下所示

package com.foo;

import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.kafka.ConcurrentKafkaListenerContainerFactoryConfigurer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;

import com.foo.FooKafkaDTO;

@Configuration
public class KafkaConsumerConfig {

    @Autowired
    private KafkaProperties kafkaProperties;

    /**
     * Default consumer factory built from the {@code spring.kafka.consumer.*}
     * properties. Only registered when no other {@link ConsumerFactory} bean exists,
     * mirroring Spring Boot's own auto-configuration.
     */
    @Bean
    @ConditionalOnMissingBean(ConsumerFactory.class)
    public ConsumerFactory<?, ?> kafkaConsumerFactory() {
        return new DefaultKafkaConsumerFactory<>(kafkaProperties.buildConsumerProperties());
    }

    /**
     * Default listener container factory. The {@code configurer} applies all
     * {@code spring.kafka.listener.*} settings (ack-mode, concurrency, etc.).
     */
    @Bean
    @ConditionalOnMissingBean(name = "kafkaListenerContainerFactory")
    public ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<Object, Object> kafkaConsumerFactory) {

        ConcurrentKafkaListenerContainerFactory<Object, Object> factory =
                new ConcurrentKafkaListenerContainerFactory<Object, Object>();
        configurer.configure(factory, kafkaConsumerFactory);
        return factory;
    }

    /**
     * Consumer factory for {@link FooKafkaDTO} payloads. Building the config map
     * from {@link KafkaProperties} keeps the {@code spring.kafka.consumer.*}
     * properties from application.properties in effect.
     */
    @Bean
    public ConsumerFactory<String, FooKafkaDTO> fooConsumerFactory() {
        return new DefaultKafkaConsumerFactory<>(
                kafkaProperties.buildConsumerProperties(),
                new StringDeserializer(),
                new JsonDeserializer<>(FooKafkaDTO.class));
    }

    /**
     * Custom listener container factory for {@link FooKafkaDTO}.
     *
     * <p>BUG FIX: the original implementation only called
     * {@code factory.setConsumerFactory(fooConsumerFactory())}, bypassing the
     * {@link ConcurrentKafkaListenerContainerFactoryConfigurer}. As a result the
     * {@code spring.kafka.listener.*} properties — in particular
     * {@code ack-mode=manual-immediate} — were never applied to this factory,
     * which is exactly what caused the
     * "no Acknowledgment available as an argument" IllegalStateException.
     * Delegating to {@code configurer.configure(...)} applies those listener
     * properties while still using the custom consumer factory. The raw-type
     * casts are needed because the configurer's API is declared against
     * {@code <Object, Object>} generics.
     */
    @Bean
    @SuppressWarnings({"unchecked", "rawtypes"})
    public ConcurrentKafkaListenerContainerFactory<String, FooKafkaDTO> fooKafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<String, FooKafkaDTO> fooConsumerFactory) {

        ConcurrentKafkaListenerContainerFactory<String, FooKafkaDTO> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        // Use the injected bean (proxy-aware) instead of calling fooConsumerFactory()
        // directly, and let the configurer apply spring.kafka.listener.* properties.
        configurer.configure((ConcurrentKafkaListenerContainerFactory) factory,
                (ConsumerFactory) fooConsumerFactory);
        return factory;
    }
}


Here are my properties 

spring.kafka.bootstrap-servers=localhost:9092
spring.kafka.listener.ack-mode=manual-immediate
spring.kafka.consumer.group-id=group_id
spring.kafka.consumer.auto-offset-reset=latest
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.key-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer
spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer


Here is my listener

/**
 * Kafka listener for {@code FooKafkaDTO} messages on topic {@code outbox.foo}.
 *
 * <p>Uses manual acknowledgment: the {@link Acknowledgment} parameter is only
 * populated when the container factory's AckMode is MANUAL / MANUAL_IMMEDIATE —
 * which is why the referenced {@code fooKafkaListenerContainerFactory} must have
 * the {@code spring.kafka.listener.ack-mode} property applied to it.
 */
@Service
public class Consumer {

    private static final Log LOG = LogFactory.getLog(Consumer.class);

    // Pinned to partition 0 starting at offset 0; groupId here overrides any
    // factory-level group id for this listener.
    @KafkaListener(
            topicPartitions = {@TopicPartition(topic = "outbox.foo",
                    partitionOffsets = @PartitionOffset(partition = "0", initialOffset = "0"))},
            groupId = "group_id",
            containerFactory = "fooKafkaListenerContainerFactory")
    public void consume(@Payload FooKafkaDTO fooKafkaDTO, Acknowledgment acknowledgment,
            @Headers MessageHeaders headers) {

        // Log the record offset taken from the message headers, then the payload.
        LOG.info("offset:::" + Long.valueOf(headers.get(KafkaHeaders.OFFSET).toString()));
        LOG.info(String.format("$$ -> Consumed Message -> %s", fooKafkaDTO));
        // Manual commit of this record's offset (requires MANUAL ack mode).
        acknowledgment.acknowledge();

    }
}

在阅读 spring-kafka spring-kafka-official-documentation 的文档之后!我可以找到取代整个样板代码的这段代码。我简化了我的 KafkaConsumerConfig class,现在看起来像下面这样。

package com.foo;

import java.util.Map;

import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;

import com.foo.FooKafkaDTO;

@Configuration
public class KafkaConsumerConfig {

    /**
     * Consumer factory for {@code FooKafkaDTO} messages.
     *
     * <p>Building the config map via {@link KafkaProperties#buildConsumerProperties()}
     * ensures every {@code spring.kafka.consumer.*} entry from
     * application.properties is picked up automatically, so no boilerplate
     * property wiring is needed here. The deserializers are supplied
     * programmatically and therefore take precedence over any
     * key/value-deserializer properties.
     *
     * <p>Improvement over the original: the factory is fully parameterized as
     * {@code <String, FooKafkaDTO>} instead of using raw types, eliminating
     * unchecked-conversion warnings and documenting the key/value types at the
     * bean's interface.
     */
    @Bean
    public DefaultKafkaConsumerFactory<String, FooKafkaDTO> fooDTOConsumerFactory(KafkaProperties properties) {

        Map<String, Object> props = properties.buildConsumerProperties();
        return new DefaultKafkaConsumerFactory<>(props,
                // Keys are plain JSON strings; ignore type headers written by producers.
                new JsonDeserializer<>(String.class)
                        .forKeys()
                        .ignoreTypeHeaders(),
                // Values are deserialized to FooKafkaDTO regardless of type headers.
                new JsonDeserializer<>(FooKafkaDTO.class)
                        .ignoreTypeHeaders());
    }
}