這節介紹如何使用spring batch。
Spring batch 默認爲內存方式(HSQLDB),但是產品功能上需要進行監控job狀態,以及異常情況。所以採用了存儲到數據庫(Mysql),
那麼就需要爲這部分建表,並配置JobRepository去使用MySQL。建表腳本在 spring-batch-core jar包下的org.springframework.batch.core中。
一、搭建環境
1. 引入jar包,採用Maven的方式引入。具體需要的包可以自己慢慢試。
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-core</artifactId>
<version>${spring.batch.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-admin-manager</artifactId>
<version>1.3.1.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-infrastructure</artifactId>
<version>${spring.batch.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-integration</artifactId>
<version>${spring.batch.version}</version>
</dependency>
2. 配置XML,如果集成spring boot則按官方文檔就可以。
a. 配置jobRepository
<!-- JobRepository persisted to MySQL (tables BATCH_*); REPEATABLE_READ on create
     reduces duplicate-key races when concurrent launches insert job metadata. -->
<batch:job-repository id="jobRepository" transaction-manager="main_txManager"
isolation-level-for-create="REPEATABLE_READ" table-prefix="BATCH_" max-varchar-length="1000" />
b. 配置jobLauncher
<!-- Synchronous launcher bound to the MySQL-backed repository above. -->
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository"/>
</bean>
c. 配置job參數傳遞bean jobParameterBulider
<!-- NOTE(review): a singleton JobParametersBuilder bean accumulates parameters
     across launches; prefer `new JobParametersBuilder()` per run. -->
<bean id="jobParameterBulider" class="org.springframework.batch.core.JobParametersBuilder" />
d. 配置job異步多線程
<!-- SimpleAsyncTaskExecutor spawns a new thread per task (no pooling). -->
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor" />
二、 開始簡單的job編寫
個人喜歡先寫配置,再寫業務邏輯上的東西。我們以提貨提醒爲例子,當然這個是去除了很多東西的例子,將就着看。
<!-- start: morning arrival/pick-up reminder job -->
<batch:job id="pickUpJob" restartable="true">
<!-- master step: fans work out to the slave step via partitioning -->
<batch:step id="pickUpmasterStep" >
<!-- partitioner="pickUpPartitioner": builds the per-partition parameters
     (sTime/eTime) that the slave step reads from its ExecutionContext -->
<partition step="pickUpSlave" partitioner="pickUpPartitioner" >
<!-- grid-size="10": run partitions on up to 10 threads via taskExecutor.
     NOTE(review): the Partitioner below emits only ONE partition, so a
     single thread ends up doing the work - confirm intent. -->
<handler grid-size="10" task-executor="taskExecutor" />
</partition>
</batch:step>
</batch:job>
<!-- Slave step referenced by the partition above; the same content could be
     configured directly inside the master step - kept separate by preference. -->
<batch:step id="pickUpSlave" >
<batch:tasklet transaction-manager="main_txManager" >
<batch:chunk reader="pickUpReader" writer="pickUpWriter"
processor="pickUpProcessor" skip-limit="20" commit-interval="100" >
<!-- skippable exceptions: up to skip-limit (20) failing items are skipped.
     NOTE(review): skipping java.lang.Exception silently swallows ALL item
     errors - consider narrowing to specific exception types. -->
<batch:skippable-exception-classes>
<batch:include class="java.lang.Exception"/>
</batch:skippable-exception-classes>
<!-- multiple writers (streams) - kept here as an example -->
<!--<batch:streams>
<batch:stream ref="pickUpWriter" />
<batch:stream ref="productItemWriter2"/>
</batch:streams>-->
</batch:chunk>
</batch:tasklet>
<!-- listener: beforeStep is mainly used to load fixed reference data -->
<batch:listeners>
<batch:listener ref="pickUpListener" before-step-method="beforeStep" />
</batch:listeners>
</batch:step>
<bean id="pickUpListener" class="com.cwenao.cc.scheduler.batch.listener.PickUpListener" />
<!-- Reader: this article uses MyBatis; JdbcTemplate works too, and MyBatis is
     not strongly recommended here. -->
<!-- Caveat: when MyBatis runs with ExecutorType BATCH, reusing an earlier
     select from a separate query throws an exception; working around that
     requires extending MyBatis yourself - not covered here. -->
<bean id="pickUpReader" class="org.mybatis.spring.batch.MyBatisPagingItemReader" scope="step" >
<property name="sqlSessionFactory" ref="sqlSessionFactory" />
<!-- fully-qualified id of the MyBatis select statement -->
<property name="queryId" value="com.cwenao.cc.basic.dao.OrderDao.selectForBatchNotPick" />
<!-- page size for the paging reader -->
<property name="pageSize" value="100"/>
<!-- query parameters -->
<property name="parameterValues">
<map>
<!-- values come from the partitioner via the step ExecutionContext -->
<entry key="sTime" value="#{stepExecutionContext[sTime]}" />
<entry key="eTime" value="#{stepExecutionContext[eTime]}" />
</map>
</property>
</bean>
<!-- Writer: batch insert of the generated notices -->
<bean id="pickUpWriter" class="org.mybatis.spring.batch.MyBatisBatchItemWriter" scope="step">
<property name="sqlSessionFactory" ref="sqlSessionFactory"/>
<property name="statementId" value="com.cwenao.cc.basic.dao.NoticeInfoDao.insertSelective"/>
</bean>
<!-- end: morning arrival/pick-up reminder job -->
以上爲配置文件,接下來一個個需要實現的類:
job啓動
public class BatchRedExpireOpenIdJob {
@Autowired
private JobLauncher jobLauncher;
@Autowired
private Job redExpireOpenIdJob;
@Autowired
JobParametersBuilder jobParameterBulider;
public boolean doExecuteTask(String jobId)
{
if(null == jobParameterBulider)
{
jobParameterBulider = SpringUtil.getBean("jobParameterBulider", JobParametersBuilder.class);
}
if(null == redExpireOpenIdJob)
{
redExpireOpenIdJob = SpringUtil.getBean("redExpireOpenIdJob",Job.class);
}
if(null == jobLauncher)
{
jobLauncher = SpringUtil.getBean("jobLauncher",JobLauncher.class);
}
//傳遞參數:jobId作爲啓動的job唯一標識
jobParameterBulider.addDate("date", new Date());
jobParameterBulider.addString("jobId", jobId);
try {
//啓動job
JobExecution execution = jobLauncher.run(redExpireOpenIdJob,jobParameterBulider.toJobParameters());
} catch (JobExecutionAlreadyRunningException e) {
e.printStackTrace();
} catch (JobRestartException e) {
e.printStackTrace();
} catch (JobInstanceAlreadyCompleteException e) {
e.printStackTrace();
} catch (JobParametersInvalidException e) {
e.printStackTrace();
}
return true;
}
}
pickUpPartitioner類
@Scope("step")
@Component("pickUpPartitioner")
public class pickUpPartitioner implements Partitioner {
// Injected from the launching JobParameters (set in the launcher class).
@Value("#{jobParameters['jobId']}")
private String jobId;
@Resource
TaskSchedulerDao taskSchedulerDao;
/**
 * Builds the [sTime, eTime] day window handed to the slave step through its
 * ExecutionContext. The target day is yesterday by default, or `hours/24`
 * days back when the scheduler row encodes an hour offset in its task name
 * as "&lt;hours&gt;#..." — TODO(review): confirm that format with the data.
 *
 * NOTE(review): gridSize is ignored and only a single partition is emitted,
 * so grid-size="10" in the XML does not actually split the data.
 *
 * @param gridSize requested number of partitions (unused)
 * @return a single partition keyed "partition" carrying sTime/eTime strings
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();
int hours = 0;
TaskScheduler taskScheduler = taskSchedulerDao.selectByPrimaryKey(jobId);
if (null != taskScheduler)
{
// split() never returns null, so only the length needs checking.
String[] parts = taskScheduler.getTaskName().split("#");
if (parts.length > 1)
{
try {
hours = Integer.parseInt(parts[0]);
} catch (NumberFormatException e) {
hours = 0; // malformed task name: fall back to "yesterday"
}
}
}
// Integer division: offsets under 24h collapse to 0 days (yesterday).
int days = hours / 24;
String preOrderTime;
if (days == 0) {
preOrderTime = DateUtil.dateToStr(DateUtil.getPreDay(), "yyyy-MM-dd");
} else {
preOrderTime = DateUtil.dateToStr(DateUtil.getPreDay(-days), "yyyy-MM-dd");
}
// Window covers the whole target day.
ExecutionContext value = new ExecutionContext();
value.putString("sTime", preOrderTime + " 00:00:00");
value.putString("eTime", preOrderTime + " 23:59:59");
result.put("partition", value);
return result;
}
public String getJobId() {
return jobId;
}
public void setJobId(String jobId) {
this.jobId = jobId;
}
}
讀取數據庫與寫入數據庫就不再寫,就是一般的sql語句。
處理過程pickUpProcessor
pickUpProcessor類
@Scope("step")
@Component("pickUpProcessor")
// Transforms each OrderVo read by pickUpReader into the NoticeInfo (pick-up
// reminder) written by pickUpWriter.
public class PickUpProcessor implements ItemProcessor<OrderVo,NoticeInfo> {
// NOTE(review): mutable static HashMap, shared across all step threads - not
// thread-safe, and unused in this trimmed example; verify in the full source.
private static Map<String,PickPoint> pickPointMap=new HashMap<>();
// jobId taken from the launching JobParameters
@Value("#{jobParameters['jobId']}")
private String jobId;
/**
 * Processes one order; argument and return type match the
 * ItemProcessor<OrderVo,NoticeInfo> declaration. Returning null causes the
 * item to be filtered out of the chunk (nothing is written for it).
 */
@Override
public NoticeInfo process( OrderVo order) throws Exception {
if(null == order){
return null;
}
// NOTE(review): amOrPm is computed but never used in this trimmed example.
Integer amOrPm = DateUtil.getAmPm();
NoticeInfo noticeInfo = new NoticeInfo();
noticeInfo.setId(UUIDUtil.generateUUID());
noticeInfo.setStatus(APIConstant.status_enable);
return noticeInfo;
}
// a getter is required so the jobId can be read back
public String getJobId() {
return jobId;
}
public void setJobId(String jobId) {
this.jobId = jobId;
}
}
監聽類與一般類無二,主要看業務上的使用。
到目前爲止,一個簡單的job就算走通了。
附錄
《spring batch in action》,這本書是我的主要參考文檔,看原文的,翻譯什麼的不是很好。
如有疑問請加公衆號(K171),如果覺得對您有幫助請到 GitHub 點個 star。