前言
从第三方获取订单,然后进行处理存入我们库中。
◇整体方案
◇数据库表设计
-- Simulated third-party JD order table (source of orders fetched by the Dataflow job).
CREATE TABLE `jd_order` (
`id` int NOT NULL AUTO_INCREMENT,
`status` int NOT NULL DEFAULT '0' COMMENT '0:未抓取,1:已抓取',
`amount` decimal(10,2) NOT NULL COMMENT '订单金额',
`create_user` varchar(50) NOT NULL COMMENT '创建人',
`create_time` datetime NOT NULL COMMENT '创建时间',
`update_user` varchar(50) NOT NULL COMMENT '更新人',
`update_time` datetime NOT NULL COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- Simulated third-party Tmall order table (source of orders fetched by the Dataflow job).
CREATE TABLE `tmall_order` (
`id` int NOT NULL AUTO_INCREMENT COMMENT '订单id',
`order_status` int NOT NULL DEFAULT '0' COMMENT '订单状态(0:未抓取,1:已抓取)',
`money` decimal(10,2) NOT NULL COMMENT '订单金额',
`create_user` varchar(50) NOT NULL COMMENT '创建人',
`create_time` datetime NOT NULL COMMENT '创建时间',
`update_user` varchar(50) NOT NULL COMMENT '更新人',
`update_time` datetime NOT NULL COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- Unified order table that aggregates fetched JD and Tmall orders.
-- (third_order_id, type) together identify the source row.
CREATE TABLE `all_order` (
`id` int NOT NULL AUTO_INCREMENT COMMENT '订单id',
`third_order_id` int NOT NULL COMMENT '第三方订单id',
`type` int NOT NULL COMMENT '类型:0京东订单,1天猫订单',
`total_amount` decimal(10,2) NOT NULL COMMENT '金额',
`create_user` varchar(50) NOT NULL COMMENT '创建用户',
`create_time` datetime NOT NULL COMMENT '创建时间',
`update_user` varchar(50) NOT NULL COMMENT '更新人',
`update_time` datetime NOT NULL COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
◇模拟第三方订单表
- 使用SimpleJob模拟第三方订单产生
- 模拟天猫与京东的订单
【每五秒钟,产生5个订单】
- 修改数据库连接地址(application.properties)
# Embedded web server port for this demo application.
server.port=8074
# ZooKeeper registry center used by elastic-job; all jobs register under this namespace.
elasticjob.zookeeper.namespace=springboot-elasticjob
elasticjob.zookeeper.server-list=localhost:2181
# MySQL connection for the `dataflow` schema — adjust host/credentials per environment.
spring.datasource.username=root
spring.datasource.password=root
spring.datasource.url=jdbc:mysql://localhost:3306/dataflow?serverTimezone=Asia/Shanghai&useSSL=false
# Classpath location of the MyBatis mapper XML files generated below.
mybatis.mapper-locations=/mybatis/*.xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE generatorConfiguration
PUBLIC "-//mybatis.org//DTD MyBatis Generator Configuration 1.0//EN"
"http://mybatis.org/dtd/mybatis-generator-config_1_0.dtd">
<!-- MyBatis Generator config: reverse-engineers model classes and mappers
     from the `dataflow` schema used by this tutorial. -->
<generatorConfiguration>
<context id="MysqlTables" targetRuntime="MyBatis3">
<jdbcConnection driverClass="com.mysql.cj.jdbc.Driver"
connectionURL="jdbc:mysql://localhost:3306/dataflow?serverTimezone=Asia/Shanghai"
userId="root"
password="root">
<!-- Without this, MySQL 8 connectors report tables from every catalog. -->
<property name="nullCatalogMeansCurrent" value="true"/>
</jdbcConnection>
<javaTypeResolver >
<property name="forceBigDecimals" value="false" />
</javaTypeResolver>
<!-- Forward slashes keep the paths portable (the original backslashes only work on Windows). -->
<javaModelGenerator targetPackage="com.zcw.springbootelasticjob.model" targetProject="src/main/java">
<property name="enableSubPackages" value="true" />
<property name="trimStrings" value="true" />
</javaModelGenerator>
<sqlMapGenerator targetPackage="mybatis" targetProject="src/main/resources">
<property name="enableSubPackages" value="true" />
</sqlMapGenerator>
<javaClientGenerator type="XMLMAPPER" targetPackage="com.zcw.springbootelasticjob.dao" targetProject="src/main/java">
<property name="enableSubPackages" value="true" />
</javaClientGenerator>
<!-- Generate for the three tables this tutorial actually uses
     (the original pointed at a non-existent t_order table). -->
<table tableName="jd_order" domainObjectName="JdOrder" />
<table tableName="tmall_order" domainObjectName="TmallOrder" />
<table tableName="all_order" domainObjectName="AllOrder" />
</context>
</generatorConfiguration>
- 通过逆向工程,生成我们对应的实体类与mapper文件
编写第三方订单产生的订单任务
- job编写
package com.zcw.springbootelasticjob.job;
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.simple.SimpleJob;
import com.zcw.autoconfig.ElasticSimpleJob;
import com.zcw.springbootelasticjob.service.OrderService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
 * Elastic-job {@link SimpleJob} that simulates a third-party system
 * producing orders: every 5 seconds (per the cron below) it asks the
 * order service to insert a small batch of mock JD/Tmall orders.
 *
 * @author Zhaocunwei
 * @since 2020-06-08
 */
@Component
@ElasticSimpleJob(
        jobName = "thirdOrderProduceJob",
        cron = "0/5 * * * * ?",
        shardingTotalCount = 1,
        overwrite = true
)
public class ThirdOrderProduceJob implements SimpleJob {

    @Autowired
    private OrderService orderService;

    /** Each trigger simply delegates to the service that creates the mock orders. */
    @Override
    public void execute(ShardingContext shardingContext) {
        orderService.produceThirdOrder();
    }
}
- 业务类代码
/**
 * Simulates a third-party system by inserting 5 mock orders per call,
 * each randomly assigned (50/50) to the JD or Tmall order table with
 * status 0 ("not yet fetched") so the Dataflow job can pick them up.
 */
public void produceThirdOrder() {
    // Hoisted out of the loop: constructing a new Random on every
    // iteration is wasteful and weakens the distribution of values.
    Random random = new Random();
    for (int i = 0; i < 5; i++) {
        if (random.nextInt(2) == 0) {
            // 0 -> create a JD order
            JdOrder jdOrder = new JdOrder();
            jdOrder.setAmount(BigDecimal.TEN);
            jdOrder.setCreateTime(new Date());
            jdOrder.setUpdateTime(new Date());
            jdOrder.setCreateUser("jdUser");
            jdOrder.setUpdateUser("jdUser");
            jdOrder.setStatus(0); // 0 = not yet fetched
            jdOrderMapper.insert(jdOrder);
        } else {
            // 1 -> create a Tmall order
            TmallOrder tmallOrder = new TmallOrder();
            tmallOrder.setCreateTime(new Date());
            tmallOrder.setCreateUser("tmallUser");
            tmallOrder.setMoney(new BigDecimal(100));
            tmallOrder.setOrderStatus(0); // 0 = not yet fetched
            tmallOrder.setUpdateUser("tmallUser");
            tmallOrder.setUpdateTime(new Date());
            tmallOrderMapper.insert(tmallOrder);
        }
    }
}
- 编写Test测试类
- 启动
◇模拟订单抓取过程–导入系统
- 使用Dataflow作业完成业务需求
1.设计分片总数2,分片项0抓取京东订单,分片项1抓取天猫订单
2.Dataflow作业每15秒执行一次,每个分片抓取5个订单
package com.zcw.autoconfig;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a Spring bean that implements DataflowJob so that
 * DataflowJobAutoConfig can discover it and register it with the
 * elastic-job scheduler.
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface ElasticDataflowJob {
// Unique job name registered under the ZooKeeper namespace.
String jobName() default "";
// Quartz-style cron expression controlling the trigger schedule.
String cron() default "";
// Total number of sharding items distributed across job instances.
int shardingTotalCount() default 1;
// When true, local configuration overwrites the config stored in ZooKeeper.
boolean overwrite() default false;
// When true, fetchData/processData loop until fetchData returns no data.
boolean streamingProcess() default false;
}
/**
 * Persists a fetched JD order into the unified all_order table and marks
 * the source jd_order row as fetched, atomically within one transaction.
 *
 * @param allOrder unified order row built from the JD order; its
 *                 thirdOrderId must be the source jd_order primary key
 */
@Transactional(rollbackFor = Exception.class)
public void processJdOrder(AllOrder allOrder) {
    allOrderMapper.insertSelective(allOrder);
    JdOrder jdOrder = new JdOrder();
    // BUG FIX: the primary key was never set, so updateByPrimaryKeySelective
    // matched no row and the source order was re-fetched forever.
    jdOrder.setId(allOrder.getThirdOrderId());
    jdOrder.setStatus(1); // 1 = fetched
    jdOrder.setUpdateUser("system");
    jdOrder.setUpdateTime(new Date());
    jdOrderMapper.updateByPrimaryKeySelective(jdOrder);
}
/**
 * Persists a fetched Tmall order into the unified all_order table and marks
 * the source tmall_order row as fetched, atomically within one transaction.
 *
 * @param allOrder unified order row built from the Tmall order; its
 *                 thirdOrderId must be the source tmall_order primary key
 */
@Transactional(rollbackFor = Exception.class)
public void processTmallOrder(AllOrder allOrder) {
    allOrderMapper.insertSelective(allOrder);
    TmallOrder tmallOrder = new TmallOrder();
    // BUG FIX: the primary key was never set, so updateByPrimaryKeySelective
    // matched no row and the source order was re-fetched forever.
    tmallOrder.setId(allOrder.getThirdOrderId());
    tmallOrder.setOrderStatus(1); // 1 = fetched
    tmallOrder.setUpdateTime(new Date());
    tmallOrder.setUpdateUser("system");
    tmallOrderMapper.updateByPrimaryKeySelective(tmallOrder);
}
核心job
package com.zcw.springbootelasticjob.job;
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.zcw.autoconfig.ElasticDataflowJob;
import com.zcw.springbootelasticjob.dao.JdOrderMapper;
import com.zcw.springbootelasticjob.dao.TmallOrderMapper;
import com.zcw.springbootelasticjob.model.AllOrder;
import com.zcw.springbootelasticjob.model.JdOrder;
import com.zcw.springbootelasticjob.model.TmallOrder;
import com.zcw.springbootelasticjob.service.OrderService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.math.BigDecimal;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Dataflow job that pulls not-yet-fetched third-party orders into the
 * unified all_order table. Sharding item 0 handles JD orders and item 1
 * handles Tmall orders; each run fetches a batch of up to 5 orders and
 * (because streamingProcess=true) keeps looping until a fetch is empty.
 *
 * @author Zhaocunwei
 * @since 2020-06-08
 */
@ElasticDataflowJob(
        jobName = "fetchThirdOrderJob",
        cron = "0/15 * * * * ?",
        shardingTotalCount = 2,
        overwrite = true,
        streamingProcess = true
)
@Component
public class FetchThirdOrderJob implements DataflowJob<Object> {

    /** Sharding item responsible for JD orders; the other item handles Tmall. */
    private static final int JD_SHARDING_ITEM = 0;
    /** Maximum number of orders fetched per shard per round. */
    private static final int FETCH_BATCH_SIZE = 5;

    @Autowired
    private OrderService orderService;
    @Autowired
    private JdOrderMapper jdOrderMapper;
    @Autowired
    private TmallOrderMapper tmallOrderMapper;

    /**
     * Fetches up to {@value #FETCH_BATCH_SIZE} unfetched orders for this shard.
     * Returns an empty list instead of null when there is nothing to process:
     * this avoids any NPE risk in the framework and cleanly stops the
     * streaming loop.
     */
    @Override
    public List<Object> fetchData(ShardingContext shardingContext) {
        if (shardingContext.getShardingItem() == JD_SHARDING_ITEM) {
            List<JdOrder> jdOrders = jdOrderMapper.getNotFetchedOrder(FETCH_BATCH_SIZE);
            if (jdOrders != null && !jdOrders.isEmpty()) {
                return jdOrders.stream().map(o -> (Object) o).collect(Collectors.toList());
            }
        } else {
            List<TmallOrder> tmallOrders = tmallOrderMapper.getNotFetchedOrder(FETCH_BATCH_SIZE);
            if (tmallOrders != null && !tmallOrders.isEmpty()) {
                return tmallOrders.stream().map(o -> (Object) o).collect(Collectors.toList());
            }
        }
        return Collections.emptyList();
    }

    /**
     * Converts each fetched order to a unified {@code AllOrder} row and hands
     * it to the transactional service, which also marks the source as fetched.
     * The sharding item tells us which concrete type the elements are.
     */
    @Override
    public void processData(ShardingContext shardingContext, List<Object> data) {
        if (data == null || data.isEmpty()) {
            return;
        }
        if (shardingContext.getShardingItem() == JD_SHARDING_ITEM) {
            for (Object d : data) {
                JdOrder jdOrder = (JdOrder) d;
                orderService.processJdOrder(buildAllOrder(jdOrder.getId(), 0, jdOrder.getAmount()));
            }
        } else {
            for (Object d : data) {
                TmallOrder tmallOrder = (TmallOrder) d;
                orderService.processTmallOrder(buildAllOrder(tmallOrder.getId(), 1, tmallOrder.getMoney()));
            }
        }
    }

    /**
     * Builds the unified order row shared by both branches.
     *
     * @param thirdOrderId primary key of the source jd_order/tmall_order row
     * @param type         0 = JD order, 1 = Tmall order
     * @param amount       order amount copied from the source row
     */
    private AllOrder buildAllOrder(Integer thirdOrderId, int type, BigDecimal amount) {
        AllOrder allOrder = new AllOrder();
        allOrder.setThirdOrderId(thirdOrderId);
        allOrder.setType(type);
        allOrder.setTotalAmount(amount);
        allOrder.setCreateTime(new Date());
        allOrder.setCreateUser("system");
        allOrder.setUpdateTime(new Date());
        allOrder.setUpdateUser("system");
        return allOrder;
    }
}
- 自动配置类
package com.zcw.autoconfig;
/**
* @ClassName : DataflowJobAutoConfig
* @Description :
* @Author : Zhaocunwei
* @Date: 2020-06-09 12:44
*/
import com.dangdang.ddframe.job.api.ElasticJob;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.lite.api.JobScheduler;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.base.CoordinatorRegistryCenter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Configuration;
import java.util.Map;
/**
* @ClassName : DataflowJobAutoConfig
* @Description :
* @Author : Zhaocunwei
* @Date: 2020-06-05 15:32
*/
@Configuration
@ConditionalOnBean(CoordinatorRegistryCenter.class)
@AutoConfigureAfter(ZookeeperAutoConfig.class)
public class DataflowJobAutoConfig {
@Autowired
private CoordinatorRegistryCenter coordinatorRegistryCenter;
@Autowired
private ApplicationContext applicationContext;
public void initDataflowJob(){
Map<String, Object> beans = applicationContext
.getBeansWithAnnotation(ElasticDataflowJob.class);
for(Map.Entry<String,Object> entry:beans.entrySet()){
Object instance = entry.getValue();
Class<?>[] interfaces = instance.getClass().getInterfaces();
for(Class<?> superInterface:interfaces){
if(superInterface == DataflowJob.class){
ElasticDataflowJob annotation = instance.getClass().getAnnotation(ElasticDataflowJob.class);
String jobName = annotation.jobName();
String cron =annotation.cron();
int shardingTotalCount = annotation.shardingTotalCount();
boolean overwrite = annotation.overwrite();
boolean streamingProcess =annotation.streamingProcess();
//job核心配置
JobCoreConfiguration buildJobCoreConfiguration = JobCoreConfiguration
.newBuilder(jobName, cron, shardingTotalCount)
.build();
//job类型配置
DataflowJobConfiguration dataflowJobConfiguration = new DataflowJobConfiguration(
buildJobCoreConfiguration,
instance.getClass().getCanonicalName(),
streamingProcess);
//job根的配置(LiteJobConfiguration)
LiteJobConfiguration buildLiteJobConfiguration = LiteJobConfiguration
.newBuilder(dataflowJobConfiguration)
.overwrite(overwrite)
.build();
// new JobScheduler(coordinatorRegistryCenter,buildLiteJobConfiguration)
// .init();
new SpringJobScheduler((ElasticJob) instance,coordinatorRegistryCenter,buildLiteJobConfiguration).init();
}
}
}
}
}
测试: