This article covers the Spring integration (Spring Boot + Elastic-Job is covered in the next article: https://blog.youkuaiyun.com/qq736572903/article/details/95309334)
Elastic-Job console (monitoring): it is better to download it from the official site and package and deploy it yourself, since this download requires points: https://blog.youkuaiyun.com/qq736572903/article/details/95310871
pom file: the ZooKeeper version must be at least 3.4.6; I was stuck for a long time because of a version mismatch.
<dependency>
    <groupId>com.dangdang</groupId>
    <artifactId>elastic-job-lite-spring</artifactId>
    <version>2.1.5</version>
</dependency>
<dependency>
    <groupId>com.dangdang</groupId>
    <artifactId>elastic-job-lite-core</artifactId>
    <version>2.1.5</version>
</dependency>
elasticJob.xml:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:reg="http://www.dangdang.com/schema/ddframe/reg"
       xmlns:job="http://www.dangdang.com/schema/ddframe/job"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
                           http://www.springframework.org/schema/beans/spring-beans.xsd
                           http://www.dangdang.com/schema/ddframe/reg
                           http://www.dangdang.com/schema/ddframe/reg/reg.xsd
                           http://www.dangdang.com/schema/ddframe/job
                           http://www.dangdang.com/schema/ddframe/job/job.xsd">

    <!-- Configure the job registry center (ZooKeeper) -->
    <reg:zookeeper id="regCenter" server-lists="192.168.0.102:2185" namespace="dd-job" base-sleep-time-milliseconds="1000" max-sleep-time-milliseconds="3000" max-retries="3" />

    <!-- Configure a simple job -->
    <job:simple id="simpleElasticJob" class="com.kahui.task.LearnElasticJob" registry-center-ref="regCenter" cron="0/10 * * * * ?" sharding-total-count="3" sharding-item-parameters="0=A,1=B,2=C" />

    <!-- failover: whether failover is enabled.
         With failover enabled, if a server loses its connection while a job is running, the sharding items
         already assigned to it are picked up and executed by the healthy servers in the cluster before the
         next scheduled run; only after that does the next run start.
         With failover disabled, the lost server's items are simply dropped for the current run, and the
         items are resharded on the next run. -->
    <!-- streaming-process: dataflow-specific switch for streaming data processing.
         If true, the dataflow job keeps running in a loop: as soon as processing finishes it fetches data
         again, so it behaves like an endless loop until fetchData returns no data.
         If false, it runs once per cron trigger. -->

    <!-- Configure a dataflow job -->
    <job:dataflow id="dataflowElasticJob" class="com.kahui.task.DataFlowElasticJob" registry-center-ref="regCenter" cron="0/20 * * * * ?" sharding-total-count="3" sharding-item-parameters="0=A,1=B,2=C" streaming-process="false" />
</beans>
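For reference, the same registry center and jobs can also be configured with the elastic-job-lite Java API instead of Spring XML. The following is a minimal sketch based on the 2.1.5 API; the values mirror the XML above, the bootstrap class name is made up, and failover(true) is added only to illustrate the failover switch discussed in the comments (the XML above does not enable it).

package com.kahui.task;

import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.config.simple.SimpleJobConfiguration;
import com.dangdang.ddframe.job.lite.api.JobScheduler;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperConfiguration;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperRegistryCenter;

/**
 * Sketch only: Java API equivalent of elasticJob.xml (class name is made up).
 */
public class JobBootstrap {

    public static void main(String[] args) {
        // Registry center: same ZooKeeper address and namespace as <reg:zookeeper>.
        ZookeeperRegistryCenter regCenter = new ZookeeperRegistryCenter(
                new ZookeeperConfiguration("192.168.0.102:2185", "dd-job"));
        regCenter.init();

        // Simple job: same cron and sharding settings as <job:simple>.
        // failover(true) only shows where the failover switch lives; the XML does not set it.
        JobCoreConfiguration simpleCore = JobCoreConfiguration
                .newBuilder("simpleElasticJob", "0/10 * * * * ?", 3)
                .shardingItemParameters("0=A,1=B,2=C")
                .failover(true)
                .build();
        SimpleJobConfiguration simpleJob =
                new SimpleJobConfiguration(simpleCore, LearnElasticJob.class.getCanonicalName());
        new JobScheduler(regCenter, LiteJobConfiguration.newBuilder(simpleJob).build()).init();

        // Dataflow job: the last constructor argument is the streaming-process switch.
        JobCoreConfiguration dataflowCore = JobCoreConfiguration
                .newBuilder("dataflowElasticJob", "0/20 * * * * ?", 3)
                .shardingItemParameters("0=A,1=B,2=C")
                .build();
        DataflowJobConfiguration dataflowJob = new DataflowJobConfiguration(
                dataflowCore, DataFlowElasticJob.class.getCanonicalName(), false);
        new JobScheduler(regCenter, LiteJobConfiguration.newBuilder(dataflowJob).build()).init();
    }
}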
Dataflow job:
package com.kahui.task;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;

/**
 * Elastic-Job dataflow job.
 */
public class DataFlowElasticJob implements DataflowJob<String> {

    private final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    @Override
    public List<String> fetchData(ShardingContext context) {
        // Collect different data depending on which sharding item this instance is running.
        int item = context.getShardingItem();
        List<String> list = new ArrayList<String>();
        switch (item) {
            case 0:
                list.add("0");
                list.add("00");
                list.add("000");
                break;
            case 1:
                list.add("1");
                list.add("11");
                list.add("111");
                break;
            case 2:
                list.add("2");
                list.add("22");
                list.add("222");
                break;
            default:
                break;
        }
        return list;
    }

    @Override
    public void processData(ShardingContext context, List<String> list) {
        try {
            // Simulate time-consuming processing.
            Thread.sleep(3000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        System.out.println("\n" + sdf.format(new Date()));
        System.out.println(String.format("Current sharding item: %d, list data: %s, %s, %s",
                context.getShardingItem(), list.get(0), list.get(1), list.get(2)));
    }
}
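The streaming-process flag changes how these two methods are driven: with streaming-process="true", fetchData is called again as soon as processData finishes, and the round only ends when fetchData returns an empty list. A hedged sketch of what a streaming-friendly job might look like (the in-memory queue is only an illustration, not part of the example above):

package com.kahui.task;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;

/** Hypothetical streaming variant: drains an in-memory queue until it is empty. */
public class StreamingDataFlowJob implements DataflowJob<String> {

    private final ConcurrentLinkedQueue<String> pending = new ConcurrentLinkedQueue<String>();

    @Override
    public List<String> fetchData(ShardingContext context) {
        // Take up to 3 pending entries per fetch.
        List<String> batch = new ArrayList<String>();
        for (int i = 0; i < 3; i++) {
            String data = pending.poll();
            if (data == null) {
                break;
            }
            batch.add(data);
        }
        // With streaming-process="true", returning an empty list here is what ends the streaming round.
        return batch;
    }

    @Override
    public void processData(ShardingContext context, List<String> data) {
        System.out.println("processed " + data);
    }
}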
Simple job:
package com.kahui.task;

import java.text.SimpleDateFormat;
import java.util.Date;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.simple.SimpleJob;

/**
 * Elastic-Job simple job.
 */
public class LearnElasticJob implements SimpleJob {

    private static final Logger logger = LoggerFactory.getLogger(LearnElasticJob.class);

    private final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    @Override
    public void execute(ShardingContext shardingContext) {
        System.out.println("\n" + sdf.format(new Date()));
        // Each instance only receives the sharding items assigned to it.
        switch (shardingContext.getShardingItem()) {
            case 0:
                // do something for sharding item 0
                System.out.println("Current sharding item 0: " + shardingContext.getShardingItem());
                break;
            case 1:
                // do something for sharding item 1
                System.out.println("Current sharding item 1: " + shardingContext.getShardingItem());
                break;
            case 2:
                // do something for sharding item 2
                System.out.println("Current sharding item 2: " + shardingContext.getShardingItem());
                break;
            // case n: ...
        }
    }
}
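The XML sets sharding-item-parameters="0=A,1=B,2=C"; inside a job you can read the mapped value (A/B/C) directly via ShardingContext#getShardingParameter() instead of switching on the numeric item. A small sketch (the class name and the printed message are just an illustration):

package com.kahui.task;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.simple.SimpleJob;

/** Sketch: using the sharding parameter (A/B/C) instead of the item number. */
public class ParameterAwareJob implements SimpleJob {

    @Override
    public void execute(ShardingContext shardingContext) {
        // For sharding-item-parameters="0=A,1=B,2=C" this returns "A", "B" or "C",
        // depending on which item this instance is executing.
        String parameter = shardingContext.getShardingParameter();
        System.out.println("Current sharding item " + shardingContext.getShardingItem()
                + " handles data partition " + parameter);
    }
}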