Spring Batch cannot load the JobBuilderFactory in my integration test
My configuration runs successfully: it loads the cell line data and publishes it to the various recipients on the cell lines topic. It works fine, but when I try to load JobLauncherTestUtils and JobRepositoryTestUtils, I get an error saying that no JobBuilderFactory bean was found. As you will see from my configuration, I do load the JobBuilderFactory and StepBuilderFactory via Lombok, delegating to Spring. Like I said, everything works except the test.
Here is the test configuration YAML file, application-test.yml:
spring:
sql:
init:
schema-locations: classpath:db/migration
platform: derby
jobmeta-ds:
datasource:
driver-class-name: org.apache.derby.jdbc.EmbeddedDriver
url: jdbc:derby:support/jhhmeta;create=true
password:
jndi-name: false
cell-datasource:
datasource:
driver-class-name: oracle.jdbc.driver.OracleDriver
url: jdbc:oracle:thin:@localhost:1521:xe
password:
jndi-name: false
Here are the data sources:
// CellDbConfig class
@Configuration
public class CellDbConfig {
@Bean
@ConfigurationProperties("cell-datasource")
public DataSourceProperties cellLineDataSourceProperties() {
return new DataSourceProperties();
}
@Bean(name = "cellDataSource")
public DataSource cellDataSource() {
HikariDataSource dataSource = cellLineDataSourceProperties().initializeDataSourceBuilder().type(HikariDataSource.class)
.build();
return dataSource;
}
@Bean(name = "cellJdbcTemplate")
public JdbcTemplate cellJdbcTemplate(@Qualifier("cellDataSource") DataSource cellDataSource) {
return new JdbcTemplate(cellDataSource);
}
}
Here is the other DataSource, for the JobRepository metadata:
@Configuration
public class JobRepoMetadataDbConfig {
@Primary
@Bean
@ConfigurationProperties("jobmeta-ds.datasource")
public DataSourceProperties jobMetadataProperties() {
return new DataSourceProperties();
}
@Primary
@Bean(name = "jobMetaDataSource")
public DataSource dataSourceJobMeta() {
DataSource dataSource = jobMetadataProperties().initializeDataSourceBuilder().type(BasicDataSource.class)
.build();
return dataSource;
}
@Bean(name = "jobMetaJdbcTemplate")
public JdbcTemplate jobMetaJdbcTemplate(@Qualifier("jobMetaDataSource") DataSource jobMetaDataSource) {
return new JdbcTemplate(jobMetaDataSource);
}
}
Here is the Spring Batch specific configuration, i.e. the JobRepository etc.:
@Configuration
@EnableBatchProcessing
@RequiredArgsConstructor
public class JobRepoConfig {
@Qualifier("jobMetaDataSource")
final DataSource jobMetaDataSource;
@Bean
AbstractPlatformTransactionManager jobTransactionManager() {
return new ResourcelessTransactionManager();
}
@Bean
public JobRepositoryFactoryBean jobRepositoryFactory() throws Exception {
JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
factory.setDataSource(jobMetaDataSource);
factory.setTransactionManager(jobTransactionManager());
factory.afterPropertiesSet();
return factory;
}
@Bean
public JobRepository jobRepository() throws Exception {
JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
jobRepositoryFactoryBean.setDataSource(jobMetaDataSource);
jobRepositoryFactoryBean.setTransactionManager(jobTransactionManager());
jobRepositoryFactoryBean.setDatabaseType(DatabaseType.H2.getProductName());
return jobRepositoryFactoryBean.getObject();
}
@Bean
public SimpleJobLauncher launchAppJobLauncher() throws Exception{
SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
simpleJobLauncher.setJobRepository(jobRepository());
return simpleJobLauncher;
}
}
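As an aside, with @EnableBatchProcessing the JobBuilderFactory and StepBuilderFactory beans are normally registered from whatever BatchConfigurer the context provides, so hand-building the JobRepository as plain beans can leave a sliced test context without those factories. Below is a minimal sketch of the BatchConfigurer route, assuming Spring Batch 4.x (where DefaultBatchConfigurer takes the DataSource in its constructor); the class name is illustrative, not from the original post:

import javax.sql.DataSource;
import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Configuration;

// Sketch: DefaultBatchConfigurer builds the JobRepository, JobLauncher and
// transaction manager from this DataSource, and @EnableBatchProcessing can
// then register JobBuilderFactory/StepBuilderFactory as usual.
@Configuration
@EnableBatchProcessing
public class JobRepoBatchConfigurer extends DefaultBatchConfigurer {
    public JobRepoBatchConfigurer(@Qualifier("jobMetaDataSource") DataSource jobMetaDataSource) {
        super(jobMetaDataSource);
    }
}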
Here is the KafkaProducer configuration that publishes the cell line data:
@Configuration
@Slf4j
public class ProducerConfig {
@Value("${spring.kafka.template.default-topic}")
private String cellsTopic;
@Bean
public ProducerFactory<Long, CellVO> kafkaProducerFactory(KafkaProperties kafkaProperties) {
var producerProperties = kafkaProperties.buildProducerProperties();
var sslProperties = kafkaProperties.getSsl().buildProperties();
Map<String, Object> props = new HashMap<>(producerProperties);
if (!CollectionUtils.isEmpty(sslProperties)) {
props.putAll(sslProperties);
}
return new DefaultKafkaProducerFactory<>(props);
}
@Bean
public KafkaTemplate<Long, CellVO> kafkaTemplate(ProducerFactory<Long, CellVO> kafkaProducerFactory) {
KafkaTemplate<Long, CellVO> kafkaTemplate = new KafkaTemplate<Long, CellVO>(kafkaProducerFactory);
kafkaTemplate.setDefaultTopic(cellsTopic);
return kafkaTemplate;
}
}
Here is the Spring Batch test class:
@SpringBatchTest
@SpringBootTest
@ActiveProfiles("test")
@Tag("integration")
@EnableAutoConfiguration
public class CellCongTest {
@Autowired
private JobLauncherTestUtils jobLauncherTestUtils;
@Autowired
private JobRepositoryTestUtils jobRepositoryTestUtils;
@Test
public void testSuccessfulLoad() throws Exception {
}
}
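For reference, once the context does load, @SpringBatchTest wires the single Job bean in the context into JobLauncherTestUtils, so the empty test above could be filled in roughly like this (a sketch using standard JUnit 5 assertions):

import static org.junit.jupiter.api.Assertions.assertEquals;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.JobExecution;

@Test
public void testSuccessfulLoad() throws Exception {
    // launchJob() runs the autowired job with a unique set of parameters
    JobExecution jobExecution = jobLauncherTestUtils.launchJob();
    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
}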
And finally, the batch job itself:
@Configuration
@EnableScheduling
@RequiredArgsConstructor
@Slf4j
public class CellBatchJobConfig {
final JobBuilderFactory jobBuilderFactory;
final JobLauncher jobAppJobLauncher;
final StepBuilderFactory stepBuilderFactory;
final KafkaTemplate<Long, CellVO> kafkaTemplate;
final KafkaItemWriteListener kafkaItemWriteListener;
final static String CELL_LINE_JOB = "CELL_LINE_JOB";
@Value("${chunk-size}")
private int chunkSize;
@Qualifier("cellDataSource")
final DataSource cellDataSource;
@Bean
public JdbcPagingItemReader<CellVO> cellDataReader(
PagingQueryProvider pagingQueryProvider) {
return new JdbcPagingItemReaderBuilder<CellVO>()
.name("cellDataReader")
.dataSource(cellDataSource)
.queryProvider(pagingQueryProvider)
.pageSize(chunkSize)
.rowMapper(new CellRowMapper())
.build();
}
@Bean
public PagingQueryProvider pagingQueryProvider() {
OraclePagingQueryProvider pagingQueryProvider = new OraclePagingQueryProvider();
final Map<String, Order> sortKeys = new HashMap<>();
sortKeys.put("CELL_ID", Order.ASCENDING);
pagingQueryProvider.setSortKeys(sortKeys);
pagingQueryProvider.setSelectClause(" CELL_ID, CELL_TYPE, SITE, CELL_QUALITY_LINE ");
pagingQueryProvider.setFromClause(" FROM DCV.CELL_LINES");
return pagingQueryProvider;
}
@Bean
public KafkaItemWriter<Long, CellVO> kafkaItemWriter() throws Exception {
KafkaItemWriter<Long, CellVO> kafkaItemWriter = new KafkaItemWriter<>();
kafkaItemWriter.setKafkaTemplate(kafkaTemplate);
kafkaItemWriter.setItemKeyMapper(CellVO::getLocationId);
kafkaItemWriter.setDelete(false);
kafkaItemWriter.afterPropertiesSet();
return kafkaItemWriter;
}
@Bean
public Step loadCellLines() throws Exception {
return stepBuilderFactory.get("step1")
.<CellVO, CellVO>chunk(chunkSize)
.reader(cellDataReader(pagingQueryProvider()))
.writer(kafkaItemWriter())
.listener(kafkaItemWriteListener)
.build();
}
@Bean
public Job cellLineJob() throws Exception {
return jobBuilderFactory.get(CELL_LINE_JOB)
.incrementer(new RunIdIncrementer())
.start(loadCellLines())
.build();
}
@Bean("jobParameters")
JobParameters jobParameters() {
JobParameters jobParameters = new JobParametersBuilder()
.addString("jobId", UUID.randomUUID().toString())
.addDate("date", new Date())
.addLong("time", System.currentTimeMillis()).toJobParameters();
return jobParameters;
}
@Scheduled(cron = "0 0 5 * * *")
public Job runCellLineJob() throws Exception {
kafkaItemWriteListener.setItems(new ArrayList<>());
return jobBuilderFactory.get(CELL_LINE_JOB)
.incrementer(new RunIdIncrementer())
.start(loadCellLines())
.build();
}
}
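Note in passing that the @Scheduled method above builds and returns a new Job but never launches it, and Spring ignores the return value of @Scheduled methods. A sketch of what it presumably intends, reusing the injected jobAppJobLauncher and the cellLineJob bean defined above:

// Sketch: launch the already-defined job instead of rebuilding it;
// fresh parameters make each scheduled run a new JobInstance.
@Scheduled(cron = "0 0 5 * * *")
public void runCellLineJob() throws Exception {
    kafkaItemWriteListener.setItems(new ArrayList<>());
    JobParameters params = new JobParametersBuilder()
            .addString("jobId", UUID.randomUUID().toString())
            .addLong("time", System.currentTimeMillis())
            .toJobParameters();
    jobAppJobLauncher.run(cellLineJob(), params);
}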
Unfortunately, the test fails with a message that the application context cannot be loaded.
The error is as follows:
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'jobLauncherTestUtils': Unsatisfied dependency expressed through method 'setJob' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'cellBatchJobConfig': Unsatisfied dependency expressed through constructor parameter 0; nested exception is org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type 'org.springframework.batch.core.configuration.annotation.JobBuilderFactory' available: expected at least 1 bean which qualifies as autowire candidate. Dependency annotations: {}
One thing I tried was wiring the job in manually, but that did not work either. I don't even understand why the job can be found in the actual configuration but not in the test:
@Configuration
class JobLaunchUtilsCellLine {
@Autowired
@Qualifier("cellLineJob")
Job cellLineJob;
@Bean
public JobLauncherTestUtils cellLineJobLauncherUtils(){
JobLauncherTestUtils jobLauncherTestUtils = new JobLauncherTestUtils();
jobLauncherTestUtils.setJob(cellLineJob);
return jobLauncherTestUtils;
}
}
I then inject it into the Spring Batch test like this, but it still doesn't work:
@Qualifier("cellLineJobLauncherUtils")
@Autowired
JobLauncherTestUtils cellLineJobLauncherUtils;
However, it still complains that the JobBuilderFactory bean does not exist.
We ran into the same problem when adding a new scheduled job configuration.
How it was solved:
- Create a JobLaunchUtils (similar to yours):
import org.springframework.batch.test.JobLauncherTestUtils
import org.springframework.batch.test.JobRepositoryTestUtils
import org.springframework.context.annotation.Bean
class JobSpecConfiguration {
@Bean
JobLauncherTestUtils getJobLauncherTestUtils() {
new JobLauncherTestUtils()
}
@Bean
JobRepositoryTestUtils getJobRepositoryTestUtils() {
new JobRepositoryTestUtils()
}
}
- Remove @SpringBatchTest and add the required configuration via @ContextConfiguration. We were able to resolve the bean dependency problem this way:
import org.spockframework.spring.SpringBean
import org.springframework.batch.core.Job
import org.springframework.batch.test.JobLauncherTestUtils
import org.springframework.batch.test.JobRepositoryTestUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.annotation.Qualifier
import org.springframework.boot.test.context.TestConfiguration
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Primary
import org.springframework.test.annotation.DirtiesContext
import org.springframework.test.context.ActiveProfiles
import org.springframework.test.context.ContextConfiguration
import spock.lang.Specification
@DirtiesContext
@ContextConfiguration(classes = [Application, TestConfig])
@ActiveProfiles(['test', 'kafka'])
class SubmitFilesJobSpec extends Specification {
@Autowired
private JobLauncherTestUtils jobLauncherTestUtils
@Autowired
private JobRepositoryTestUtils jobRepositoryTestUtils
@SpringBean
private SomeRepo someRepo = Mock()
def cleanup() {
jobRepositoryTestUtils.removeJobExecutions()
}
//some unit test that we have
def "Verify batch run"() {
given: "At least 1 Open Record"
def record = defaultData()
someRepo.findTop1ByStatus(_) >> record
when: "A batch job has been triggered"
def jobExecution = jobLauncherTestUtils.launchJob(BaseJobExecution.getJobParameters(null))
then: "Job will be completed with at least 1 persisted/processed record"
2 == jobExecution.getStepExecutions().size()
jobExecution.getStepExecutions().forEach(stepExecution -> {
1 == stepExecution.getWriteCount()
})
"SOME_JOB_NAME" == jobExecution.getJobInstance().getJobName()
"COMPLETED" == jobExecution.getExitStatus().getExitCode()
}
@TestConfiguration
static class TestConfig extends JobSpecConfiguration {
@Override
@Bean
JobLauncherTestUtils getJobLauncherTestUtils() {
new JobLauncherTestUtils() {
@Override
@Autowired
void setJob(@Qualifier("submitFilesJob") Job job) {
super.setJob(job)
}
}
}
}
}
With this, the tests run successfully using the Spock framework.
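Translated to the Java setup from the question, the same idea would be a @TestConfiguration that registers JobLauncherTestUtils itself and pins it to the qualified job, instead of relying on @SpringBatchTest to autowire it (a sketch; the bean and job names are taken from the question):

// Sketch: declare this as a static nested class of CellCongTest (or import it
// via @ContextConfiguration) so the test context picks it up.
@TestConfiguration
static class CellLineTestConfig {
    @Bean
    public JobLauncherTestUtils cellLineJobLauncherUtils() {
        return new JobLauncherTestUtils() {
            @Override
            @Autowired
            public void setJob(@Qualifier("cellLineJob") Job job) {
                super.setJob(job);
            }
        };
    }
}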