在非 spring 托管 Web 应用程序中将属性注入 spring 启动自动配置 类
Injecting properties to spring boot auto-configure classes in a non-spring managed web application
我已经使用 Spring Boot 创建了一个用作 Kafka 客户端的库;库中基本上只有几个类,每个类都标注了 @SpringBootConfiguration 和 @EnableAutoConfiguration 注解。
// Root configuration of the Kafka client library: @SpringBootConfiguration marks it as the
// Boot configuration class and @EnableAutoConfiguration activates Spring Boot's
// auto-configuration when this jar's context is refreshed. Body elided in the question.
@Slf4j
@SpringBootConfiguration
@EnableAutoConfiguration
public class KafkaHandlerConfiguration {
...
}
和
// Gateway interface exposed by the library and registered as a Spring @Service bean;
// its methods are elided in the question. Consumers look it up from the context by type.
@Service
interface SwiftalkKafkaGateway {
...
}
我为它创建了一个带有依赖项的 jar,这个 JAR 将通过 CDI 在 Java EE webapp 中使用。
我将通过此代码在 CDI 上下文中获取 bean
@Singleton
@ApplicationScoped
class SwiftalkAnnotatedSpringContextLoader {

    // Spring context built exactly once when CDI instantiates this singleton.
    private final AnnotationConfigApplicationContext springContext;

    SwiftalkAnnotatedSpringContextLoader() {
        AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
        ctx.scan("com.digite.cloud.swiftalk");
        ctx.refresh();
        this.springContext = ctx;
    }

    // Hands the refreshed Spring context to CDI callers so they can fetch beans by type.
    ApplicationContext getSwiftalkKafkaClientContext() {
        return this.springContext;
    }
}
如何向 Spring Boot 自动配置在启动时创建的 bean 传递所需的属性?我有 spring.kafka 组的属性和一些自定义属性,它们在 KafkaHandlerConfiguration 中通过 @Value 注解注入:
// Async-executor tuning properties injected via @Value placeholders; the value after
// the ':' in each placeholder is the default used when the property is not defined
// in the Environment.
@Value("${digite.swiftalk.kafka.executor.core-pool-size:10}")
private Integer corePoolSize;
@Value("${digite.swiftalk.kafka.executor.max-pool-size:20}")
private Integer maxPoolSize;
@Value("${digite.swiftalk.kafka.executor.queue-capacity:100}")
private Integer queueCapacity;
和
"spring.kafka.producer.properties.max.block.ms=1000",
"spring.kafka.producer.bootstrap-servers=localhost:9999",
"spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer",
"spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
使用 ConfigurableEnvironment 和 MutablePropertySources 对我有用;下面是我把包含属性的 Environment 装入 Context 的方法:
@Singleton
@ApplicationScoped
class SwiftalkAnnotatedSpringContextLoader {

    // Classpath resource holding the spring.kafka.* / digite.swiftalk.* properties.
    private static final String CONFIG_RESOURCE = "spring-config.properties";

    private final AnnotationConfigApplicationContext springContext;

    /**
     * Builds the Spring context and registers the properties file as the
     * highest-precedence property source before refresh, so both @Value
     * placeholders and Boot auto-configuration see the values.
     *
     * @throws IOException if the properties resource is missing or unreadable
     */
    SwiftalkAnnotatedSpringContextLoader() throws IOException {
        springContext = new AnnotationConfigApplicationContext();
        ConfigurableEnvironment environment = new StandardEnvironment();
        MutablePropertySources propertySources = environment.getPropertySources();
        Properties appProps = new Properties();
        // Fix: the original leaked the InputStream and threw an opaque NPE when the
        // resource was absent; try-with-resources closes it, and a missing resource
        // now fails with a descriptive IOException instead.
        try (InputStream in = this.getClass().getClassLoader().getResourceAsStream(CONFIG_RESOURCE)) {
            if (in == null) {
                throw new IOException("classpath resource not found: " + CONFIG_RESOURCE);
            }
            appProps.load(in);
        }
        // addFirst gives these properties precedence over system/env property sources.
        propertySources.addFirst(new PropertySource<Properties>("spring-properties", appProps) {
            @Override
            public Object getProperty(String name) {
                return appProps.getProperty(name);
            }
        });
        // The environment must be set before scan/refresh so placeholder resolution works.
        springContext.setEnvironment(environment);
        springContext.scan("com.digite.cloud.swiftalk");
        springContext.refresh();
    }

    // Exposes the refreshed Spring context to CDI consumers.
    ApplicationContext getSwiftalkKafkaClientContext() {
        return this.springContext;
    }
}
在 src/test/resources 中添加了一个文件 spring-config.properties,内容如下:
spring.data.mongodb.database=embedded
spring.data.mongodb.port=12345
spring.data.mongodb.host=localhost
spring.kafka.producer.properties.max.block.ms=2000
spring.kafka.producer.bootstrap-servers=localhost:19092
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer
digite.swiftalk.kafka.upstream-type-header=UPSTREAM-TYPE
digite.swiftalk.kafka.upstream-instance-header=INSTANCE-HEADER
digite.swiftalk.kafka.message-key-header=MESSAGE-KEY-HEADER
digite.swiftalk.kafka.executor.core-pool-size=20
digite.swiftalk.kafka.executor.max-pool-size=50
digite.swiftalk.kafka.executor.queue-capacity=1000
和测试
@Test
void testLoadsSpringApplicationContext() throws IOException {
    // Build the loader exactly as the CDI container would.
    SwiftalkAnnotatedSpringContextLoader loader = new SwiftalkAnnotatedSpringContextLoader();
    // The gateway bean must be resolvable from the refreshed context.
    SwiftalkKafkaGateway kafkaGateway = loader.getSwiftalkKafkaClientContext().getBean(SwiftalkKafkaGateway.class);
    assertNotNull(kafkaGateway);
    ThreadPoolTaskExecutor asyncExecutor = loader.getSwiftalkKafkaClientContext().getBean(
            ThreadPoolTaskExecutor.class);
    // assertEquals reports expected vs. actual on failure; assertTrue(x == 20) only says "false".
    Assertions.assertEquals(20, asyncExecutor.getCorePoolSize());
}
而 corePoolSize 在 Spring Boot 库中的默认值为 10:
@Value("${digite.swiftalk.kafka.executor.core-pool-size:10}")
private Integer corePoolSize;
我已经使用 Spring Boot 创建了一个用作 Kafka 客户端的库;库中基本上只有几个类,每个类都标注了 @SpringBootConfiguration 和 @EnableAutoConfiguration 注解。
// Root configuration of the Kafka client library: @SpringBootConfiguration marks it as the
// Boot configuration class and @EnableAutoConfiguration activates Spring Boot's
// auto-configuration when this jar's context is refreshed. Body elided in the question.
@Slf4j
@SpringBootConfiguration
@EnableAutoConfiguration
public class KafkaHandlerConfiguration {
...
}
和
// Gateway interface exposed by the library and registered as a Spring @Service bean;
// its methods are elided in the question. Consumers look it up from the context by type.
@Service
interface SwiftalkKafkaGateway {
...
}
我为它创建了一个带有依赖项的 jar,这个 JAR 将通过 CDI 在 Java EE webapp 中使用。 我将通过此代码在 CDI 上下文中获取 bean
@Singleton
@ApplicationScoped
class SwiftalkAnnotatedSpringContextLoader {

    // Spring context built exactly once when CDI instantiates this singleton.
    private final AnnotationConfigApplicationContext springContext;

    SwiftalkAnnotatedSpringContextLoader() {
        AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
        ctx.scan("com.digite.cloud.swiftalk");
        ctx.refresh();
        this.springContext = ctx;
    }

    // Hands the refreshed Spring context to CDI callers so they can fetch beans by type.
    ApplicationContext getSwiftalkKafkaClientContext() {
        return this.springContext;
    }
}
如何向 Spring Boot 自动配置在启动时创建的 bean 传递所需的属性?我有 spring.kafka 组的属性和一些自定义属性,它们在 KafkaHandlerConfiguration 中通过 @Value 注解注入:
// Async-executor tuning properties injected via @Value placeholders; the value after
// the ':' in each placeholder is the default used when the property is not defined
// in the Environment.
@Value("${digite.swiftalk.kafka.executor.core-pool-size:10}")
private Integer corePoolSize;
@Value("${digite.swiftalk.kafka.executor.max-pool-size:20}")
private Integer maxPoolSize;
@Value("${digite.swiftalk.kafka.executor.queue-capacity:100}")
private Integer queueCapacity;
和
"spring.kafka.producer.properties.max.block.ms=1000",
"spring.kafka.producer.bootstrap-servers=localhost:9999",
"spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer",
"spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer",
使用 ConfigurableEnvironment 和 MutablePropertySources 对我有用;下面是我把包含属性的 Environment 装入 Context 的方法:
@Singleton
@ApplicationScoped
class SwiftalkAnnotatedSpringContextLoader {

    // Classpath resource holding the spring.kafka.* / digite.swiftalk.* properties.
    private static final String CONFIG_RESOURCE = "spring-config.properties";

    private final AnnotationConfigApplicationContext springContext;

    /**
     * Builds the Spring context and registers the properties file as the
     * highest-precedence property source before refresh, so both @Value
     * placeholders and Boot auto-configuration see the values.
     *
     * @throws IOException if the properties resource is missing or unreadable
     */
    SwiftalkAnnotatedSpringContextLoader() throws IOException {
        springContext = new AnnotationConfigApplicationContext();
        ConfigurableEnvironment environment = new StandardEnvironment();
        MutablePropertySources propertySources = environment.getPropertySources();
        Properties appProps = new Properties();
        // Fix: the original leaked the InputStream and threw an opaque NPE when the
        // resource was absent; try-with-resources closes it, and a missing resource
        // now fails with a descriptive IOException instead.
        try (InputStream in = this.getClass().getClassLoader().getResourceAsStream(CONFIG_RESOURCE)) {
            if (in == null) {
                throw new IOException("classpath resource not found: " + CONFIG_RESOURCE);
            }
            appProps.load(in);
        }
        // addFirst gives these properties precedence over system/env property sources.
        propertySources.addFirst(new PropertySource<Properties>("spring-properties", appProps) {
            @Override
            public Object getProperty(String name) {
                return appProps.getProperty(name);
            }
        });
        // The environment must be set before scan/refresh so placeholder resolution works.
        springContext.setEnvironment(environment);
        springContext.scan("com.digite.cloud.swiftalk");
        springContext.refresh();
    }

    // Exposes the refreshed Spring context to CDI consumers.
    ApplicationContext getSwiftalkKafkaClientContext() {
        return this.springContext;
    }
}
在 src/test/resources 中添加了文件 spring-config.properties,内容如下:
spring.data.mongodb.database=embedded
spring.data.mongodb.port=12345
spring.data.mongodb.host=localhost
spring.kafka.producer.properties.max.block.ms=2000
spring.kafka.producer.bootstrap-servers=localhost:19092
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer
digite.swiftalk.kafka.upstream-type-header=UPSTREAM-TYPE
digite.swiftalk.kafka.upstream-instance-header=INSTANCE-HEADER
digite.swiftalk.kafka.message-key-header=MESSAGE-KEY-HEADER
digite.swiftalk.kafka.executor.core-pool-size=20
digite.swiftalk.kafka.executor.max-pool-size=50
digite.swiftalk.kafka.executor.queue-capacity=1000
和测试
@Test
void testLoadsSpringApplicationContext() throws IOException {
    // Build the loader exactly as the CDI container would.
    SwiftalkAnnotatedSpringContextLoader loader = new SwiftalkAnnotatedSpringContextLoader();
    // The gateway bean must be resolvable from the refreshed context.
    SwiftalkKafkaGateway kafkaGateway = loader.getSwiftalkKafkaClientContext().getBean(SwiftalkKafkaGateway.class);
    assertNotNull(kafkaGateway);
    ThreadPoolTaskExecutor asyncExecutor = loader.getSwiftalkKafkaClientContext().getBean(
            ThreadPoolTaskExecutor.class);
    // assertEquals reports expected vs. actual on failure; assertTrue(x == 20) only says "false".
    Assertions.assertEquals(20, asyncExecutor.getCorePoolSize());
}
而 corePoolSize 在 Spring Boot 库中的默认值为 10:
@Value("${digite.swiftalk.kafka.executor.core-pool-size:10}")
private Integer corePoolSize;