![](/img/trans.png)
[英]Spring Batch CompositeItemWriter Transaction Roll back issue
[英]Spring Batch Transaction issue
我正在使用 spring Batch 實現應用程序。 我正在關注 ItemReader、處理器、ItemWriter 方法。 我已經創建了分區數據的分區器組件。 通過 ItemReader 我正在讀取數據並對其進行處理。 處理后我在數據庫中寫回數據。 工作完成后,我觀察到數據庫中缺少一些數據。 有時一個分區的執行會失敗。 有時 Job 會成功執行。 有時我會遇到異常。 它的隨機。
是否有任何線程同步或事務,我們需要維護?
例如
此方案在不使用分區或使用多線程步驟的情況下工作正常
示例代碼 - :此代碼有時有效並提交所有數據,有時失敗..有時我觀察到很少有數據未提交到數據庫中(即使 BATCH_STEP_EXECUTION 表中的提交計數是正確的)。 這有點隨機。
@Configuration
@EnableBatchProcessing
@EnableTransactionManagement
@EnableAspectJAutoProxy(proxyTargetClass = true)
public class BatchConfig {
@Autowired
private JobBuilderFactory jobBuilderFactory;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Bean
public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
SimpleJobLauncher launcher = new SimpleJobLauncher();
launcher.setJobRepository(jobRepository);
return launcher;
}
@Bean(name = "customerJob")
public Job prepareBatch1() {
return jobBuilderFactory.get("customerJob").incrementer(new RunIdIncrementer()).start(masterStep()).listener(listener())
.build();
}
@Bean
public Step masterStep() {
return stepBuilderFactory.get("masterStep").
partitioner(slaveStep().getName(), partitioner())
.partitionHandler(partitionHandler())
.build();
}
@Bean
public BatchListener listener() {
return new BatchListener();
}
@Bean
@JobScope
public BatchPartitioner partitioner() {
return new BatchPartitioner();
}
@Bean
@StepScope
public PartitionHandler partitionHandler() {
TaskExecutorPartitionHandler taskExecutorPartitionHandler = new TaskExecutorPartitionHandler();
taskExecutorPartitionHandler.setGridSize(2);
taskExecutorPartitionHandler.setTaskExecutor(taskExecutor());
taskExecutorPartitionHandler.setStep(slaveStep());
try {
taskExecutorPartitionHandler.afterPropertiesSet();
} catch (Exception e) {
return taskExecutorPartitionHandler;
}
@Bean
@StepScope
public Step slaveStep() {
return stepBuilderFactory.get("slaveStep").<Customer, CustomerWrapperDTO>chunk(100)
.reader(getReader())
.processor(processor())
.writer(writer())
.build();
}
@Bean
@StepScope
public BatchWriter writer() {
return new BatchWriter();
}
@Bean
@StepScope
public BatchProcessor processor() {
return new BatchProcessor();
}
@Bean
@StepScope
public BatchReader getReader() {
return new BatchReader();
}
@Bean
public TaskExecutor taskExecutor() {
ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
taskExecutor.setMaxPoolSize(Runtime.getRuntime().availableProcessors());
taskExecutor.setCorePoolSize(Runtime.getRuntime().availableProcessors());
taskExecutor.afterPropertiesSet();
return taskExecutor;
}
}
/**
 * Pairs a Customer with the Address resolved for it during processing,
 * so the writer can persist both in the same chunk transaction.
 */
class CustomerWrapperDTO {
    private Address address;
    private Customer customer;

    public Address getAddress() {
        return address;
    }

    public void setAddress(Address address) {
        this.address = address;
    }

    public Customer getCustomer() {
        return customer;
    }

    public void setCustomer(Customer customer) {
        this.customer = customer;
    }
}
實體
// Entity stub shown by the question author; only the status field is relevant here.
class Customer {
    String processStatus; // "U" : unprocessed, "C" : completed, "F" : Failed
}
public class BatchListener implements JobExecutionListener {
@Autowired
private CustomerRepo customerRepo;
public BatchListener() {
}
@Override
public void beforeJob(JobExecution jobExecution) {
List<Customer> customers;
try {
customers = customerRepo.getAllUnprocessedCustomer);
} catch (Exception e) {
throw new CustomerException("failed in BatchListener", e);
}
jobExecution.getExecutionContext().put("customers",customers);
}
@Override
public void afterJob(JobExecution jobExecution) {
}
}
/**
 * Splits the customer list (placed in the job context by BatchListener) into
 * gridSize execution contexts, one per worker step execution.
 */
public class BatchPartitioner implements Partitioner {

    @Value("#{jobExecutionContext[customers]}")
    private List<Customer> customers;

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        Map<String, ExecutionContext> result = new HashMap<>();
        if (customers == null || customers.isEmpty() || gridSize <= 0) {
            return result;
        }
        // Round-robin assignment. The original computed size = customers.size() / gridSize,
        // which (a) divides by zero... throws ArithmeticException when there are fewer
        // customers than gridSize (size == 0), and (b) on uneven division produced
        // gridSize + 1 buckets while only the first gridSize were consumed, silently
        // dropping the remainder — matching the reported "missing data" symptom.
        List<List<Customer>> buckets = new ArrayList<>();
        for (int i = 0; i < gridSize; i++) {
            buckets.add(new ArrayList<>());
        }
        for (int i = 0; i < customers.size(); i++) {
            buckets.get(i % gridSize).add(customers.get(i));
        }
        for (int i = 0; i < gridSize; i++) {
            ExecutionContext executionContext = new ExecutionContext();
            executionContext.putString("name", "Thread_" + i);
            executionContext.put("customers", buckets.get(i));
            result.put("partition" + i, executionContext);
        }
        return result;
    }
}
@Component
@StepScope
Class BatchReader {
private int index;
@Value("#{stepExecutionContext[customers]}")
private List<Customer> customers;
@Override
public Customer read() {
Customer Customer = null;
if (index < customers.size()) {
Customer = customers.get(index);
index++;
} else {
index = 0;
}
return Customer;
}
}
/**
 * Resolves an Address for each Customer and marks the customer "C" (completed)
 * or "F" (failed). A failed lookup is recorded on the item rather than thrown,
 * so one bad customer does not roll back the whole chunk.
 */
@Component
@StepScope
public class BatchProcessor implements ItemProcessor<Customer, CustomerWrapperDTO> {

    public BatchProcessor() {
    }

    @Override
    public CustomerWrapperDTO process(Customer item) {
        // The original declared the return type as BatchProcessor, which
        // contradicts ItemProcessor<Customer, CustomerWrapperDTO> and cannot compile.
        CustomerWrapperDTO customerWrapper = new CustomerWrapperDTO();
        try {
            Address address = null; // TODO: API call or some business logic.
            // Address belongs on the wrapper — Customer (as shown) has no address field.
            customerWrapper.setAddress(address);
            item.setProcessStatus("C"); // Completed
        } catch (Exception e) {
            item.setProcessStatus("F"); // failed
        }
        customerWrapper.setCustomer(item);
        return customerWrapper;
    }
}
/**
 * Persists each processed customer and its resolved address within the chunk
 * transaction. Any persistence failure is rethrown unchecked so Spring Batch
 * rolls back the current chunk.
 *
 * Fixed from the original: the item type was "CustomerBatchWrapperDTO" (a type
 * that does not exist — the DTO declared above is CustomerWrapperDTO) and the
 * repository type was misspelled "CustmerRepo".
 */
@Component
@StepScope
public class BatchWriter implements ItemWriter<CustomerWrapperDTO> {

    @Autowired
    private CustomerRepo customerRepo;

    @Autowired
    private AddressRepo addessRepo;

    public BatchWriter() {
    }

    @Override
    public void write(List<? extends CustomerWrapperDTO> items) {
        items.forEach(item -> {
            try {
                if (item.getCustomer() != null) {
                    customerRepo.merge(item.getCustomer());
                }
                if (item.getAddress() != null) {
                    addessRepo.save(item.getAddress());
                }
            } catch (Exception e) {
                // Unchecked rethrow (cause preserved) triggers chunk rollback.
                throw new RuntimeException(e);
            }
        });
    }
}
Spring 批處理將按塊處理。 如果一個塊失敗(這意味着至少有一個項目未能處理),事務將被回滾。
問題在於您的項目閱讀器:它持有可變狀態(index 字段),而在多線程/分區步驟中,同一個讀取器實例可能被多個線程併發調用,因此不是線程安全的,會導致數據被跳過或重複讀取。解決方法是同步
read
方法,或將您的閱讀器包裝在 SynchronizedItemStreamReader 中。
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.