feat(): remove the XXL-JOB admin console framework

boot3.0
dongzeliang 2025-02-26 16:14:35 +08:00
parent ba51301f0b
commit 32f3f9cea6
229 changed files with 68 additions and 44456 deletions

View File

@ -12,8 +12,8 @@ public class PropertyPreExcludeFilter extends SimplePropertyPreFilter {
}
public PropertyPreExcludeFilter addExcludes (String... filters) {
for (int i = 0 ; i < filters.length ; i++) {
this.getExcludes().add(filters[i]);
for (String filter : filters) {
this.getExcludes().add(filter);
}
return this;
}
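The hunk above only swaps the index loop in addExcludes for an enhanced for loop; behavior is unchanged. A minimal usage sketch of the filter follows (the User bean, the "password" field name, the no-arg constructor, and the import of PropertyPreExcludeFilter are assumptions for illustration; fastjson2's JSON.toJSONString accepts the filter as a trailing Filter argument):

import com.alibaba.fastjson2.JSON;

public class PropertyPreExcludeFilterDemo {

    // Simple bean standing in for any entity that carries a sensitive field.
    public static class User {
        private final String name = "tom";
        private final String password = "secret";
        public String getName() { return name; }
        public String getPassword() { return password; }
    }

    public static void main(String[] args) {
        // Exclude the "password" property before serializing.
        PropertyPreExcludeFilter filter = new PropertyPreExcludeFilter()
                .addExcludes("password");
        // Prints {"name":"tom"} - the excluded property is dropped.
        System.out.println(JSON.toJSONString(new User(), filter));
    }
}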

View File

@ -72,15 +72,15 @@
</dependency>
<!-- XxlJob定时任务 -->
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-xxl</artifactId>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.muyu</groupId>-->
<!-- <artifactId>cloud-common-xxl</artifactId>-->
<!-- </dependency>-->
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-message</artifactId>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.muyu</groupId>-->
<!-- <artifactId>cloud-common-message</artifactId>-->
<!-- </dependency>-->
</dependencies>
<build>

View File

@ -74,7 +74,7 @@ public class SysDictDataController extends BaseController {
public void export (HttpServletResponse response, SysDictDataListReq sysDictDataListReq) {
// TODO 需要重写导出查询
PageQueryModel<SysDictData> list = dictDataService.selectDictDataList(SysDictDataPageQueryModel.reqBuild(sysDictDataListReq));
ExcelUtil<SysDictData> util = new ExcelUtil<SysDictData>(SysDictData.class);
ExcelUtil<SysDictData> util = new ExcelUtil<>(SysDictData.class);
util.exportExcel(response, list.getDataList(), "字典数据");
}
@ -83,7 +83,7 @@ public class SysDictDataController extends BaseController {
*/
@RequiresPermissions("system:dict:query")
@GetMapping(value = "/{dictCode}")
public Result getInfo (@PathVariable("dictCode") Long dictCode) {
public Result<SysDictData> getInfo (@PathVariable("dictCode") Long dictCode) {
return success(dictDataService.getOneById(dictCode));
}
@ -91,7 +91,7 @@ public class SysDictDataController extends BaseController {
*
*/
@GetMapping(value = "/type/{dictType}")
public Result dictType (@PathVariable("dictType") String dictType) {
public Result<List<SysDictData>> dictType (@PathVariable("dictType") String dictType) {
List<SysDictData> data = dictTypeService.selectDictDataByType(dictType);
if (StringUtils.isNull(data)) {
data = new ArrayList<>();

View File

@ -10,6 +10,7 @@ import lombok.experimental.SuperBuilder;
/**
*
* @author dongzeliang
*/
@Data
@SuperBuilder

View File

@ -1,52 +1,52 @@
package com.muyu.system.rabbit;
import com.alibaba.fastjson2.JSONObject;
import com.muyu.system.domain.SysConfig;
import jakarta.annotation.PostConstruct;
import lombok.extern.log4j.Log4j2;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Log4j2
@RestController
@RequestMapping("/rabbit/test")
public class RabbitTest {
@Autowired
private RabbitTemplate rabbitTemplate;
@Bean
public Queue initQueue(){
return new Queue("rabbit.test.init");
}
@RabbitListener(queues = "rabbit.test.init")
public void msg(SysConfig sysConfig){
log.info("消息队列:[{}], 消息内容:[{}]", "rabbit.test.init", JSONObject.toJSONString(sysConfig));
}
@PostConstruct
public void init(){
new Thread(() -> {
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
SysConfig sysConfig = SysConfig.builder()
.configId(1L)
.configKey("ceshi-key")
.configName("测试名称")
.configType("测试类型")
.configValue("测试值")
.build();
rabbitTemplate.convertAndSend("rabbit.test.init",sysConfig);
}).start();
}
}
//package com.muyu.system.rabbit;
//
//import com.alibaba.fastjson2.JSONObject;
//import com.muyu.system.domain.SysConfig;
//import jakarta.annotation.PostConstruct;
//import lombok.extern.log4j.Log4j2;
//import org.springframework.amqp.core.Queue;
//import org.springframework.amqp.rabbit.annotation.RabbitListener;
//import org.springframework.amqp.rabbit.core.RabbitTemplate;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.context.annotation.Bean;
//import org.springframework.web.bind.annotation.RequestMapping;
//import org.springframework.web.bind.annotation.RestController;
//
//@Log4j2
//@RestController
//@RequestMapping("/rabbit/test")
//public class RabbitTest {
//
// @Autowired
// private RabbitTemplate rabbitTemplate;
//
// @Bean
// public Queue initQueue(){
// return new Queue("rabbit.test.init");
// }
//
// @RabbitListener(queues = "rabbit.test.init")
// public void msg(SysConfig sysConfig){
// log.info("消息队列:[{}], 消息内容:[{}]", "rabbit.test.init", JSONObject.toJSONString(sysConfig));
// }
//
// @PostConstruct
// public void init(){
// new Thread(() -> {
// try {
// Thread.sleep(5000);
// } catch (InterruptedException e) {
// throw new RuntimeException(e);
// }
// SysConfig sysConfig = SysConfig.builder()
// .configId(1L)
// .configKey("ceshi-key")
// .configName("测试名称")
// .configType("测试类型")
// .configValue("测试值")
// .build();
// rabbitTemplate.convertAndSend("rabbit.test.init",sysConfig);
// }).start();
// }
//
//}

View File

@ -74,6 +74,7 @@ public class SysDeptServiceImpl extends ServiceImpl<SysDeptMapper, SysDept>
StringUtils.isNotNull(sysDeptPageQueryModel.getDeptName()),SysDept::getDeptName, sysDeptPageQueryModel.getDeptName()
);
queryWrapper.eq(Objects.nonNull(sysDeptPageQueryModel.getStatus()),SysDept::getStatus,sysDeptPageQueryModel.getStatus());
queryWrapper.eq(SysDept::getDelFlag, SysDelFlagEnum.EXIST.getCode());
queryWrapper.eq(sysDeptPageQueryModel.getDeptId() != null,SysDept::getDeptId,sysDeptPageQueryModel.getDeptId());
return this.list(queryWrapper);
}
@ -176,7 +177,7 @@ public class SysDeptServiceImpl extends ServiceImpl<SysDeptMapper, SysDept>
@Override
public boolean hasChildByDeptId (Long deptId) {
LambdaQueryWrapper<SysDept> lambdaQueryWrapper = new LambdaQueryWrapper<SysDept>()
.eq(SysDept::getDeptId, deptId)
.eq(SysDept::getParentId, deptId)
.eq(SysDept::getDelFlag, SysNormalDisableEnum.ENABLE.getCode());
return this.count(lambdaQueryWrapper) > 0;
}
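The second hunk here is a real bug fix: matching SysDept::getDeptId against the passed id can only ever find the department itself, so hasChildByDeptId now matches SysDept::getParentId, i.e. it counts rows that point at the given department as their parent. A plain-Java sketch of the corrected semantics (the Dept record, the sample data, and the "0" not-deleted code are illustrative, not the project's real types):

import java.util.List;

public class HasChildDemo {

    // Minimal stand-in for SysDept: id, parent pointer, delete flag.
    record Dept(long deptId, long parentId, String delFlag) {}

    static boolean hasChildByDeptId(List<Dept> all, long deptId) {
        return all.stream()
                .filter(d -> "0".equals(d.delFlag()))    // keep only not-deleted rows
                .anyMatch(d -> d.parentId() == deptId);  // match parentId, as in the fix
    }

    public static void main(String[] args) {
        List<Dept> depts = List.of(
                new Dept(100L, 0L, "0"),    // root department
                new Dept(101L, 100L, "0")); // child of 100
        System.out.println(hasChildByDeptId(depts, 100L)); // true  - 101 points at it
        System.out.println(hasChildByDeptId(depts, 101L)); // false - nothing points at it
    }
}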

View File

@ -1,149 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.xuxueli</groupId>
<artifactId>xxl-job</artifactId>
<version>2.5.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<description>A distributed task scheduling framework.</description>
<url>https://www.xuxueli.com/</url>
<modules>
<module>xxl-job-core</module>
<module>xxl-job-admin</module>
</modules>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<maven.compiler.encoding>UTF-8</maven.compiler.encoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<maven.test.skip>true</maven.test.skip>
<netty.version>4.1.115.Final</netty.version>
<gson.version>2.11.0</gson.version>
<spring.version>5.3.39</spring.version>
<spring-boot.version>2.7.18</spring-boot.version>
<mybatis-spring-boot-starter.version>2.3.2</mybatis-spring-boot-starter.version>
<mysql-connector-j.version>9.1.0</mysql-connector-j.version>
<slf4j-api.version>2.0.16</slf4j-api.version>
<junit-jupiter.version>5.11.3</junit-jupiter.version>
<javax.annotation-api.version>1.3.2</javax.annotation-api.version>
<groovy.version>4.0.24</groovy.version>
<maven-source-plugin.version>3.3.1</maven-source-plugin.version>
<maven-javadoc-plugin.version>3.11.1</maven-javadoc-plugin.version>
<maven-gpg-plugin.version>3.2.7</maven-gpg-plugin.version>
</properties>
<build>
<plugins>
</plugins>
</build>
<licenses>
<license>
<name>GNU General Public License version 3</name>
<url>https://opensource.org/licenses/GPL-3.0</url>
</license>
</licenses>
<scm>
<tag>master</tag>
<url>https://github.com/xuxueli/xxl-job.git</url>
<connection>scm:git:https://github.com/xuxueli/xxl-job.git</connection>
<developerConnection>scm:git:git@github.com:xuxueli/xxl-job.git</developerConnection>
</scm>
<developers>
<developer>
<id>XXL</id>
<name>xuxueli</name>
<email>931591021@qq.com</email>
<url>https://github.com/xuxueli</url>
</developer>
</developers>
<profiles>
<profile>
<id>release</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>xxl-job-core</module>
</modules>
<build>
<plugins>
<!-- Source -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Javadoc -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<doclint>none</doclint>
</configuration>
</execution>
</executions>
</plugin>
<!-- GPG -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>${maven-gpg-plugin.version}</version>
<configuration>
<useAgent>false</useAgent>
</configuration>
<executions>
<execution>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<distributionManagement>
<snapshotRepository>
<id>oss</id>
<url>https://oss.sonatype.org/content/repositories/snapshots/</url>
</snapshotRepository>
<repository>
<id>oss</id>
<url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
</repository>
</distributionManagement>
</profile>
</profiles>
</project>

View File

@ -1,11 +0,0 @@
FROM openjdk:8-jre-slim
MAINTAINER xuxueli
ENV PARAMS=""
ENV TZ=PRC
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
ADD target/xxl-job-admin-*.jar /app.jar
ENTRYPOINT ["sh","-c","java -jar $JAVA_OPTS /app.jar $PARAMS"]

View File

@ -1,119 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.xuxueli</groupId>
<artifactId>xxl-job</artifactId>
<version>2.5.0-SNAPSHOT</version>
</parent>
<artifactId>xxl-job-admin</artifactId>
<packaging>jar</packaging>
<properties>
<maven.deploy.skip>true</maven.deploy.skip>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring-boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<!-- starter-web: spring-webmvc + autoconfigure + logback + yaml + tomcat -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<!-- starter-test: junit + spring-test + mockito -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<!-- freemarker-starter -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-freemarker</artifactId>
</dependency>
<!-- mail-starter -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-mail</artifactId>
</dependency>
<!-- starter-actuator -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<!-- mybatis-starter: mybatis + mybatis-spring + hikari (default) -->
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<version>${mybatis-spring-boot-starter.version}</version>
</dependency>
<!-- mysql -->
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
<version>${mysql-connector-j.version}</version>
</dependency>
<!-- xxl-job-core -->
<dependency>
<groupId>com.xuxueli</groupId>
<artifactId>xxl-job-core</artifactId>
<version>${project.parent.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${spring-boot.version}</version>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- docker -->
<!--<plugin>
<groupId>com.spotify</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>0.4.13</version>
<configuration>
&lt;!&ndash; made of '[a-z0-9-_.]' &ndash;&gt;
<imageName>${project.artifactId}:${project.version}</imageName>
<dockerDirectory>${project.basedir}</dockerDirectory>
<resources>
<resource>
<targetPath>/</targetPath>
<directory>${project.build.directory}</directory>
<include>${project.build.finalName}.jar</include>
</resource>
</resources>
</configuration>
</plugin>-->
</plugins>
</build>
</project>

View File

@ -1,16 +0,0 @@
package com.xxl.job.admin;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* @author xuxueli 2018-10-28 00:38:13
*/
@SpringBootApplication
public class XxlJobAdminApplication {
public static void main(String[] args) {
SpringApplication.run(XxlJobAdminApplication.class, args);
}
}

View File

@ -1,96 +0,0 @@
package com.xxl.job.admin.controller;
import com.xxl.job.admin.controller.annotation.PermissionLimit;
import com.xxl.job.admin.service.impl.LoginService;
import com.xxl.job.admin.service.XxlJobService;
import com.xxl.job.core.biz.model.ReturnT;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.view.RedirectView;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
/**
* index controller
* @author xuxueli 2015-12-19 16:13:16
*/
@Controller
public class IndexController {
@Resource
private XxlJobService xxlJobService;
@Resource
private LoginService loginService;
@RequestMapping("/")
public String index(Model model) {
Map<String, Object> dashboardMap = xxlJobService.dashboardInfo();
model.addAllAttributes(dashboardMap);
return "index";
}
@RequestMapping("/chartInfo")
@ResponseBody
public ReturnT<Map<String, Object>> chartInfo(Date startDate, Date endDate) {
ReturnT<Map<String, Object>> chartInfo = xxlJobService.chartInfo(startDate, endDate);
return chartInfo;
}
@RequestMapping("/toLogin")
@PermissionLimit(limit=false)
public ModelAndView toLogin(HttpServletRequest request, HttpServletResponse response,ModelAndView modelAndView) {
if (loginService.ifLogin(request, response) != null) {
modelAndView.setView(new RedirectView("/",true,false));
return modelAndView;
}
return new ModelAndView("login");
}
@RequestMapping(value="login", method=RequestMethod.POST)
@ResponseBody
@PermissionLimit(limit=false)
public ReturnT<String> loginDo(HttpServletRequest request, HttpServletResponse response, String userName, String password, String ifRemember){
boolean ifRem = (ifRemember!=null && ifRemember.trim().length()>0 && "on".equals(ifRemember))?true:false;
return loginService.login(request, response, userName, password, ifRem);
}
@RequestMapping(value="logout", method=RequestMethod.POST)
@ResponseBody
@PermissionLimit(limit=false)
public ReturnT<String> logout(HttpServletRequest request, HttpServletResponse response){
return loginService.logout(request, response);
}
@RequestMapping("/help")
public String help() {
/*if (!PermissionInterceptor.ifLogin(request)) {
return "redirect:/toLogin";
}*/
return "help";
}
@InitBinder
public void initBinder(WebDataBinder binder) {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
dateFormat.setLenient(false);
binder.registerCustomEditor(Date.class, new CustomDateEditor(dateFormat, true));
}
}

View File

@ -1,72 +0,0 @@
package com.xxl.job.admin.controller;
import com.xxl.job.admin.controller.annotation.PermissionLimit;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.core.biz.AdminBiz;
import com.xxl.job.core.biz.model.HandleCallbackParam;
import com.xxl.job.core.biz.model.RegistryParam;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.util.GsonTool;
import com.xxl.job.core.util.XxlJobRemotingUtil;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
/**
* Created by xuxueli on 17/5/10.
*/
@Controller
@RequestMapping("/api")
public class JobApiController {
@Resource
private AdminBiz adminBiz;
/**
* api
*
* @param uri
* @param data
* @return
*/
@RequestMapping("/{uri}")
@ResponseBody
@PermissionLimit(limit=false)
public ReturnT<String> api(HttpServletRequest request, @PathVariable("uri") String uri, @RequestBody(required = false) String data) {
// valid
if (!"POST".equalsIgnoreCase(request.getMethod())) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "invalid request, HttpMethod not support.");
}
if (uri==null || uri.trim().length()==0) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "invalid request, uri-mapping empty.");
}
if (XxlJobAdminConfig.getAdminConfig().getAccessToken()!=null
&& XxlJobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
&& !XxlJobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(XxlJobRemotingUtil.XXL_JOB_ACCESS_TOKEN))) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "The access token is wrong.");
}
// services mapping
if ("callback".equals(uri)) {
List<HandleCallbackParam> callbackParamList = GsonTool.fromJson(data, List.class, HandleCallbackParam.class);
return adminBiz.callback(callbackParamList);
} else if ("registry".equals(uri)) {
RegistryParam registryParam = GsonTool.fromJson(data, RegistryParam.class);
return adminBiz.registry(registryParam);
} else if ("registryRemove".equals(uri)) {
RegistryParam registryParam = GsonTool.fromJson(data, RegistryParam.class);
return adminBiz.registryRemove(registryParam);
} else {
return new ReturnT<String>(ReturnT.FAIL_CODE, "invalid request, uri-mapping("+ uri +") not found.");
}
}
}

View File

@ -1,100 +0,0 @@
package com.xxl.job.admin.controller;
import com.xxl.job.admin.controller.interceptor.PermissionInterceptor;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLogGlue;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.dao.XxlJobInfoDao;
import com.xxl.job.admin.dao.XxlJobLogGlueDao;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.glue.GlueTypeEnum;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.List;
/**
* job code controller
* @author xuxueli 2015-12-19 16:13:16
*/
@Controller
@RequestMapping("/jobcode")
public class JobCodeController {
@Resource
private XxlJobInfoDao xxlJobInfoDao;
@Resource
private XxlJobLogGlueDao xxlJobLogGlueDao;
@RequestMapping
public String index(HttpServletRequest request, Model model, int jobId) {
XxlJobInfo jobInfo = xxlJobInfoDao.loadById(jobId);
List<XxlJobLogGlue> jobLogGlues = xxlJobLogGlueDao.findByJobId(jobId);
if (jobInfo == null) {
throw new RuntimeException(I18nUtil.getString("jobinfo_glue_jobid_unvalid"));
}
if (GlueTypeEnum.BEAN == GlueTypeEnum.match(jobInfo.getGlueType())) {
throw new RuntimeException(I18nUtil.getString("jobinfo_glue_gluetype_unvalid"));
}
// valid permission
PermissionInterceptor.validJobGroupPermission(request, jobInfo.getJobGroup());
// Glue类型-字典
model.addAttribute("GlueTypeEnum", GlueTypeEnum.values());
model.addAttribute("jobInfo", jobInfo);
model.addAttribute("jobLogGlues", jobLogGlues);
return "jobcode/jobcode.index";
}
@RequestMapping("/save")
@ResponseBody
public ReturnT<String> save(HttpServletRequest request, int id, String glueSource, String glueRemark) {
// valid
if (glueRemark==null) {
return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_glue_remark")) );
}
if (glueRemark.length()<4 || glueRemark.length()>100) {
return new ReturnT<String>(500, I18nUtil.getString("jobinfo_glue_remark_limit"));
}
XxlJobInfo existsJobInfo = xxlJobInfoDao.loadById(id);
if (existsJobInfo == null) {
return new ReturnT<String>(500, I18nUtil.getString("jobinfo_glue_jobid_unvalid"));
}
// valid permission
PermissionInterceptor.validJobGroupPermission(request, existsJobInfo.getJobGroup());
// update new code
existsJobInfo.setGlueSource(glueSource);
existsJobInfo.setGlueRemark(glueRemark);
existsJobInfo.setGlueUpdatetime(new Date());
existsJobInfo.setUpdateTime(new Date());
xxlJobInfoDao.update(existsJobInfo);
// log old code
XxlJobLogGlue xxlJobLogGlue = new XxlJobLogGlue();
xxlJobLogGlue.setJobId(existsJobInfo.getId());
xxlJobLogGlue.setGlueType(existsJobInfo.getGlueType());
xxlJobLogGlue.setGlueSource(glueSource);
xxlJobLogGlue.setGlueRemark(glueRemark);
xxlJobLogGlue.setAddTime(new Date());
xxlJobLogGlue.setUpdateTime(new Date());
xxlJobLogGlueDao.save(xxlJobLogGlue);
// remove code backup more than 30
xxlJobLogGlueDao.removeOld(existsJobInfo.getId(), 30);
return ReturnT.SUCCESS;
}
}

View File

@ -1,207 +0,0 @@
package com.xxl.job.admin.controller;
import com.xxl.job.admin.controller.annotation.PermissionLimit;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobRegistry;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.dao.XxlJobGroupDao;
import com.xxl.job.admin.dao.XxlJobInfoDao;
import com.xxl.job.admin.dao.XxlJobRegistryDao;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.enums.RegistryConfig;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.*;
/**
* job group controller
* @author xuxueli 2016-10-02 20:52:56
*/
@Controller
@RequestMapping("/jobgroup")
public class JobGroupController {
@Resource
public XxlJobInfoDao xxlJobInfoDao;
@Resource
public XxlJobGroupDao xxlJobGroupDao;
@Resource
private XxlJobRegistryDao xxlJobRegistryDao;
@RequestMapping
@PermissionLimit(adminuser = true)
public String index(Model model) {
return "jobgroup/jobgroup.index";
}
@RequestMapping("/pageList")
@ResponseBody
@PermissionLimit(adminuser = true)
public Map<String, Object> pageList(HttpServletRequest request,
@RequestParam(required = false, defaultValue = "0") int start,
@RequestParam(required = false, defaultValue = "10") int length,
String appname, String title) {
// page query
List<XxlJobGroup> list = xxlJobGroupDao.pageList(start, length, appname, title);
int list_count = xxlJobGroupDao.pageListCount(start, length, appname, title);
// package result
Map<String, Object> maps = new HashMap<String, Object>();
// 总记录数
maps.put("recordsTotal", list_count);
// 过滤后的总记录数
maps.put("recordsFiltered", list_count);
// 分页列表
maps.put("data", list);
return maps;
}
@RequestMapping("/save")
@ResponseBody
@PermissionLimit(adminuser = true)
public ReturnT<String> save(XxlJobGroup xxlJobGroup){
// valid
if (xxlJobGroup.getAppname()==null || xxlJobGroup.getAppname().trim().length()==0) {
return new ReturnT<String>(500, (I18nUtil.getString("system_please_input")+"AppName") );
}
if (xxlJobGroup.getAppname().length()<4 || xxlJobGroup.getAppname().length()>64) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_appname_length") );
}
if (xxlJobGroup.getAppname().contains(">") || xxlJobGroup.getAppname().contains("<")) {
return new ReturnT<String>(500, "AppName"+I18nUtil.getString("system_unvalid") );
}
if (xxlJobGroup.getTitle()==null || xxlJobGroup.getTitle().trim().length()==0) {
return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title")) );
}
if (xxlJobGroup.getTitle().contains(">") || xxlJobGroup.getTitle().contains("<")) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_title")+I18nUtil.getString("system_unvalid") );
}
if (xxlJobGroup.getAddressType()!=0) {
if (xxlJobGroup.getAddressList()==null || xxlJobGroup.getAddressList().trim().length()==0) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_addressType_limit") );
}
if (xxlJobGroup.getAddressList().contains(">") || xxlJobGroup.getAddressList().contains("<")) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_registryList")+I18nUtil.getString("system_unvalid") );
}
String[] addresss = xxlJobGroup.getAddressList().split(",");
for (String item: addresss) {
if (item==null || item.trim().length()==0) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_registryList_unvalid") );
}
}
}
// process
xxlJobGroup.setUpdateTime(new Date());
int ret = xxlJobGroupDao.save(xxlJobGroup);
return (ret>0)?ReturnT.SUCCESS:ReturnT.FAIL;
}
@RequestMapping("/update")
@ResponseBody
@PermissionLimit(adminuser = true)
public ReturnT<String> update(XxlJobGroup xxlJobGroup){
// valid
if (xxlJobGroup.getAppname()==null || xxlJobGroup.getAppname().trim().length()==0) {
return new ReturnT<String>(500, (I18nUtil.getString("system_please_input")+"AppName") );
}
if (xxlJobGroup.getAppname().length()<4 || xxlJobGroup.getAppname().length()>64) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_appname_length") );
}
if (xxlJobGroup.getTitle()==null || xxlJobGroup.getTitle().trim().length()==0) {
return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title")) );
}
if (xxlJobGroup.getAddressType() == 0) {
// 0=自动注册
List<String> registryList = findRegistryByAppName(xxlJobGroup.getAppname());
String addressListStr = null;
if (registryList!=null && !registryList.isEmpty()) {
Collections.sort(registryList);
addressListStr = "";
for (String item:registryList) {
addressListStr += item + ",";
}
addressListStr = addressListStr.substring(0, addressListStr.length()-1);
}
xxlJobGroup.setAddressList(addressListStr);
} else {
// 1=手动录入
if (xxlJobGroup.getAddressList()==null || xxlJobGroup.getAddressList().trim().length()==0) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_addressType_limit") );
}
String[] addresss = xxlJobGroup.getAddressList().split(",");
for (String item: addresss) {
if (item==null || item.trim().length()==0) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_registryList_unvalid") );
}
}
}
// process
xxlJobGroup.setUpdateTime(new Date());
int ret = xxlJobGroupDao.update(xxlJobGroup);
return (ret>0)?ReturnT.SUCCESS:ReturnT.FAIL;
}
private List<String> findRegistryByAppName(String appnameParam){
HashMap<String, List<String>> appAddressMap = new HashMap<String, List<String>>();
List<XxlJobRegistry> list = xxlJobRegistryDao.findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
if (list != null) {
for (XxlJobRegistry item: list) {
if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
String appname = item.getRegistryKey();
List<String> registryList = appAddressMap.get(appname);
if (registryList == null) {
registryList = new ArrayList<String>();
}
if (!registryList.contains(item.getRegistryValue())) {
registryList.add(item.getRegistryValue());
}
appAddressMap.put(appname, registryList);
}
}
}
return appAddressMap.get(appnameParam);
}
@RequestMapping("/remove")
@ResponseBody
@PermissionLimit(adminuser = true)
public ReturnT<String> remove(int id){
// valid
int count = xxlJobInfoDao.pageListCount(0, 10, id, -1, null, null, null);
if (count > 0) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_del_limit_0") );
}
List<XxlJobGroup> allList = xxlJobGroupDao.findAll();
if (allList.size() == 1) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_del_limit_1") );
}
int ret = xxlJobGroupDao.remove(id);
return (ret>0)?ReturnT.SUCCESS:ReturnT.FAIL;
}
@RequestMapping("/loadById")
@ResponseBody
@PermissionLimit(adminuser = true)
public ReturnT<XxlJobGroup> loadById(int id){
XxlJobGroup jobGroup = xxlJobGroupDao.load(id);
return jobGroup!=null?new ReturnT<XxlJobGroup>(jobGroup):new ReturnT<XxlJobGroup>(ReturnT.FAIL_CODE, null);
}
}

View File

@ -1,160 +0,0 @@
package com.xxl.job.admin.controller;
import com.xxl.job.admin.controller.interceptor.PermissionInterceptor;
import com.xxl.job.admin.core.exception.XxlJobException;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobUser;
import com.xxl.job.admin.core.route.ExecutorRouteStrategyEnum;
import com.xxl.job.admin.core.scheduler.MisfireStrategyEnum;
import com.xxl.job.admin.core.scheduler.ScheduleTypeEnum;
import com.xxl.job.admin.core.thread.JobScheduleHelper;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.dao.XxlJobGroupDao;
import com.xxl.job.admin.service.XxlJobService;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.enums.ExecutorBlockStrategyEnum;
import com.xxl.job.core.glue.GlueTypeEnum;
import com.xxl.job.core.util.DateUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.*;
/**
* index controller
* @author xuxueli 2015-12-19 16:13:16
*/
@Controller
@RequestMapping("/jobinfo")
public class JobInfoController {
private static Logger logger = LoggerFactory.getLogger(JobInfoController.class);
@Resource
private XxlJobGroupDao xxlJobGroupDao;
@Resource
private XxlJobService xxlJobService;
@RequestMapping
public String index(HttpServletRequest request, Model model, @RequestParam(required = false, defaultValue = "-1") int jobGroup) {
// 枚举-字典
// 路由策略-列表
model.addAttribute("ExecutorRouteStrategyEnum", ExecutorRouteStrategyEnum.values());
// Glue类型-字典
model.addAttribute("GlueTypeEnum", GlueTypeEnum.values());
// 阻塞处理策略-字典
model.addAttribute("ExecutorBlockStrategyEnum", ExecutorBlockStrategyEnum.values());
// 调度类型
model.addAttribute("ScheduleTypeEnum", ScheduleTypeEnum.values());
// 调度过期策略
model.addAttribute("MisfireStrategyEnum", MisfireStrategyEnum.values());
// 执行器列表
List<XxlJobGroup> jobGroupList1 = xxlJobGroupDao.findAll();
// filter group
List<XxlJobGroup> jobGroupList = PermissionInterceptor.filterJobGroupByRole(request, jobGroupList1);
if (jobGroupList==null || jobGroupList.isEmpty()) {
throw new XxlJobException(I18nUtil.getString("jobgroup_empty"));
}
model.addAttribute("JobGroupList", jobGroupList);
model.addAttribute("jobGroup", jobGroup);
return "jobinfo/jobinfo.index";
}
@RequestMapping("/pageList")
@ResponseBody
public Map<String, Object> pageList(@RequestParam(required = false, defaultValue = "0") int start,
@RequestParam(required = false, defaultValue = "10") int length,
int jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author) {
return xxlJobService.pageList(start, length, jobGroup, triggerStatus, jobDesc, executorHandler, author);
}
@RequestMapping("/add")
@ResponseBody
public ReturnT<String> add(HttpServletRequest request, XxlJobInfo jobInfo) {
// valid permission
PermissionInterceptor.validJobGroupPermission(request, jobInfo.getJobGroup());
// opt
XxlJobUser loginUser = PermissionInterceptor.getLoginUser(request);
return xxlJobService.add(jobInfo, loginUser);
}
@RequestMapping("/update")
@ResponseBody
public ReturnT<String> update(HttpServletRequest request, XxlJobInfo jobInfo) {
// valid permission
PermissionInterceptor.validJobGroupPermission(request, jobInfo.getJobGroup());
// opt
XxlJobUser loginUser = PermissionInterceptor.getLoginUser(request);
return xxlJobService.update(jobInfo, loginUser);
}
@RequestMapping("/remove")
@ResponseBody
public ReturnT<String> remove(int id) {
return xxlJobService.remove(id);
}
@RequestMapping("/stop")
@ResponseBody
public ReturnT<String> pause(int id) {
return xxlJobService.stop(id);
}
@RequestMapping("/start")
@ResponseBody
public ReturnT<String> start(int id) {
return xxlJobService.start(id);
}
@RequestMapping("/trigger")
@ResponseBody
public ReturnT<String> triggerJob(HttpServletRequest request, int id, String executorParam, String addressList) {
// login user
XxlJobUser loginUser = PermissionInterceptor.getLoginUser(request);
// trigger
return xxlJobService.trigger(loginUser, id, executorParam, addressList);
}
@RequestMapping("/nextTriggerTime")
@ResponseBody
public ReturnT<List<String>> nextTriggerTime(String scheduleType, String scheduleConf) {
XxlJobInfo paramXxlJobInfo = new XxlJobInfo();
paramXxlJobInfo.setScheduleType(scheduleType);
paramXxlJobInfo.setScheduleConf(scheduleConf);
List<String> result = new ArrayList<>();
try {
Date lastTime = new Date();
for (int i = 0; i < 5; i++) {
lastTime = JobScheduleHelper.generateNextValidTime(paramXxlJobInfo, lastTime);
if (lastTime != null) {
result.add(DateUtil.formatDateTime(lastTime));
} else {
break;
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
return new ReturnT<List<String>>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) + e.getMessage());
}
return new ReturnT<List<String>>(result);
}
}

View File

@ -1,263 +0,0 @@
package com.xxl.job.admin.controller;
import com.xxl.job.admin.controller.interceptor.PermissionInterceptor;
import com.xxl.job.admin.core.complete.XxlJobCompleter;
import com.xxl.job.admin.core.exception.XxlJobException;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog;
import com.xxl.job.admin.core.scheduler.XxlJobScheduler;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.dao.XxlJobGroupDao;
import com.xxl.job.admin.dao.XxlJobInfoDao;
import com.xxl.job.admin.dao.XxlJobLogDao;
import com.xxl.job.core.biz.ExecutorBiz;
import com.xxl.job.core.biz.model.KillParam;
import com.xxl.job.core.biz.model.LogParam;
import com.xxl.job.core.biz.model.LogResult;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.util.DateUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.util.HtmlUtils;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* index controller
* @author xuxueli 2015-12-19 16:13:16
*/
@Controller
@RequestMapping("/joblog")
public class JobLogController {
private static Logger logger = LoggerFactory.getLogger(JobLogController.class);
@Resource
private XxlJobGroupDao xxlJobGroupDao;
@Resource
public XxlJobInfoDao xxlJobInfoDao;
@Resource
public XxlJobLogDao xxlJobLogDao;
@RequestMapping
public String index(HttpServletRequest request, Model model, @RequestParam(required = false, defaultValue = "0") Integer jobId) {
// 执行器列表
List<XxlJobGroup> jobGroupList_all = xxlJobGroupDao.findAll();
// filter group
List<XxlJobGroup> jobGroupList = PermissionInterceptor.filterJobGroupByRole(request, jobGroupList_all);
if (jobGroupList==null || jobGroupList.size()==0) {
throw new XxlJobException(I18nUtil.getString("jobgroup_empty"));
}
model.addAttribute("JobGroupList", jobGroupList);
// 任务
if (jobId > 0) {
XxlJobInfo jobInfo = xxlJobInfoDao.loadById(jobId);
if (jobInfo == null) {
throw new RuntimeException(I18nUtil.getString("jobinfo_field_id") + I18nUtil.getString("system_unvalid"));
}
model.addAttribute("jobInfo", jobInfo);
// valid permission
PermissionInterceptor.validJobGroupPermission(request, jobInfo.getJobGroup());
}
return "joblog/joblog.index";
}
@RequestMapping("/getJobsByGroup")
@ResponseBody
public ReturnT<List<XxlJobInfo>> getJobsByGroup(int jobGroup){
List<XxlJobInfo> list = xxlJobInfoDao.getJobsByGroup(jobGroup);
return new ReturnT<List<XxlJobInfo>>(list);
}
@RequestMapping("/pageList")
@ResponseBody
public Map<String, Object> pageList(HttpServletRequest request,
@RequestParam(required = false, defaultValue = "0") int start,
@RequestParam(required = false, defaultValue = "10") int length,
int jobGroup, int jobId, int logStatus, String filterTime) {
// valid permission 仅管理员支持查询全部;普通用户仅支持查询有权限的 jobGroup
PermissionInterceptor.validJobGroupPermission(request, jobGroup);
// parse param
Date triggerTimeStart = null;
Date triggerTimeEnd = null;
if (filterTime!=null && !filterTime.trim().isEmpty()) {
String[] temp = filterTime.split(" - ");
if (temp.length == 2) {
triggerTimeStart = DateUtil.parseDateTime(temp[0]);
triggerTimeEnd = DateUtil.parseDateTime(temp[1]);
}
}
// page query
List<XxlJobLog> list = xxlJobLogDao.pageList(start, length, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
int list_count = xxlJobLogDao.pageListCount(start, length, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
// package result
Map<String, Object> maps = new HashMap<String, Object>();
// 总记录数
maps.put("recordsTotal", list_count);
// 过滤后的总记录数
maps.put("recordsFiltered", list_count);
// 分页列表
maps.put("data", list);
return maps;
}
@RequestMapping("/logDetailPage")
public String logDetailPage(int id, Model model){
// base check
ReturnT<String> logStatue = ReturnT.SUCCESS;
XxlJobLog jobLog = xxlJobLogDao.load(id);
if (jobLog == null) {
throw new RuntimeException(I18nUtil.getString("joblog_logid_unvalid"));
}
model.addAttribute("triggerCode", jobLog.getTriggerCode());
model.addAttribute("handleCode", jobLog.getHandleCode());
model.addAttribute("logId", jobLog.getId());
return "joblog/joblog.detail";
}
@RequestMapping("/logDetailCat")
@ResponseBody
public ReturnT<LogResult> logDetailCat(long logId, int fromLineNum){
try {
// valid
// todo, need to improve performance
XxlJobLog jobLog = xxlJobLogDao.load(logId);
if (jobLog == null) {
return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_logid_unvalid"));
}
// log cat
ExecutorBiz executorBiz = XxlJobScheduler.getExecutorBiz(jobLog.getExecutorAddress());
ReturnT<LogResult> logResult = executorBiz.log(new LogParam(jobLog.getTriggerTime().getTime(), logId, fromLineNum));
// is end
if (logResult.getContent()!=null && logResult.getContent().getFromLineNum() > logResult.getContent().getToLineNum()) {
if (jobLog.getHandleCode() > 0) {
logResult.getContent().setEnd(true);
}
}
// fix xss
if (logResult.getContent()!=null && StringUtils.hasText(logResult.getContent().getLogContent())) {
String newLogContent = logResult.getContent().getLogContent();
newLogContent = HtmlUtils.htmlEscape(newLogContent, "UTF-8");
logResult.getContent().setLogContent(newLogContent);
}
return logResult;
} catch (Exception e) {
logger.error(e.getMessage(), e);
return new ReturnT<LogResult>(ReturnT.FAIL_CODE, e.getMessage());
}
}
@RequestMapping("/logKill")
@ResponseBody
public ReturnT<String> logKill(int id){
// base check
XxlJobLog log = xxlJobLogDao.load(id);
XxlJobInfo jobInfo = xxlJobInfoDao.loadById(log.getJobId());
if (jobInfo==null) {
return new ReturnT<String>(500, I18nUtil.getString("jobinfo_glue_jobid_unvalid"));
}
if (ReturnT.SUCCESS_CODE != log.getTriggerCode()) {
return new ReturnT<String>(500, I18nUtil.getString("joblog_kill_log_limit"));
}
// request of kill
ReturnT<String> runResult = null;
try {
ExecutorBiz executorBiz = XxlJobScheduler.getExecutorBiz(log.getExecutorAddress());
runResult = executorBiz.kill(new KillParam(jobInfo.getId()));
} catch (Exception e) {
logger.error(e.getMessage(), e);
runResult = new ReturnT<>(500, e.getMessage());
}
if (ReturnT.SUCCESS_CODE == runResult.getCode()) {
log.setHandleCode(ReturnT.FAIL_CODE);
log.setHandleMsg( I18nUtil.getString("joblog_kill_log_byman")+":" + (runResult.getMsg()!=null?runResult.getMsg():""));
log.setHandleTime(new Date());
XxlJobCompleter.updateHandleInfoAndFinish(log);
return new ReturnT<>(runResult.getMsg());
} else {
return new ReturnT<>(500, runResult.getMsg());
}
}
@RequestMapping("/clearLog")
@ResponseBody
public ReturnT<String> clearLog(HttpServletRequest request, int jobGroup, int jobId, int type){
// valid permission
PermissionInterceptor.validJobGroupPermission(request, jobGroup);
// opt
Date clearBeforeTime = null;
int clearBeforeNum = 0;
if (type == 1) {
// 清理一个月之前日志数据
clearBeforeTime = DateUtil.addMonths(new Date(), -1);
} else if (type == 2) {
// 清理三个月之前日志数据
clearBeforeTime = DateUtil.addMonths(new Date(), -3);
} else if (type == 3) {
// 清理六个月之前日志数据
clearBeforeTime = DateUtil.addMonths(new Date(), -6);
} else if (type == 4) {
// 清理一年之前日志数据
clearBeforeTime = DateUtil.addYears(new Date(), -1);
} else if (type == 5) {
// 清理一千条以前日志数据
clearBeforeNum = 1000;
} else if (type == 6) {
// 清理一万条以前日志数据
clearBeforeNum = 10000;
} else if (type == 7) {
// 清理三万条以前日志数据
clearBeforeNum = 30000;
} else if (type == 8) {
// 清理十万条以前日志数据
clearBeforeNum = 100000;
} else if (type == 9) {
// 清理所有日志数据
clearBeforeNum = 0;
} else {
return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_clean_type_unvalid"));
}
List<Long> logIds = null;
do {
logIds = xxlJobLogDao.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
if (logIds!=null && !logIds.isEmpty()) {
xxlJobLogDao.clearLog(logIds);
}
} while (logIds!=null && !logIds.isEmpty());
return ReturnT.SUCCESS;
}
}

View File

@ -1,189 +0,0 @@
package com.xxl.job.admin.controller;
import com.xxl.job.admin.controller.annotation.PermissionLimit;
import com.xxl.job.admin.controller.interceptor.PermissionInterceptor;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobUser;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.dao.XxlJobGroupDao;
import com.xxl.job.admin.dao.XxlJobUserDao;
import com.xxl.job.core.biz.model.ReturnT;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.DigestUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author xuxueli 2019-05-04 16:39:50
*/
@Controller
@RequestMapping("/user")
public class JobUserController {
@Resource
private XxlJobUserDao xxlJobUserDao;
@Resource
private XxlJobGroupDao xxlJobGroupDao;
@RequestMapping
@PermissionLimit(adminuser = true)
public String index(Model model) {
// 执行器列表
List<XxlJobGroup> groupList = xxlJobGroupDao.findAll();
model.addAttribute("groupList", groupList);
return "user/user.index";
}
@RequestMapping("/pageList")
@ResponseBody
@PermissionLimit(adminuser = true)
public Map<String, Object> pageList(@RequestParam(required = false, defaultValue = "0") int start,
@RequestParam(required = false, defaultValue = "10") int length,
String username, int role) {
// page list
List<XxlJobUser> list = xxlJobUserDao.pageList(start, length, username, role);
int list_count = xxlJobUserDao.pageListCount(start, length, username, role);
// filter
if (list!=null && list.size()>0) {
for (XxlJobUser item: list) {
item.setPassword(null);
}
}
// package result
Map<String, Object> maps = new HashMap<String, Object>();
// 总记录数
maps.put("recordsTotal", list_count);
// 过滤后的总记录数
maps.put("recordsFiltered", list_count);
// 分页列表
maps.put("data", list);
return maps;
}
@RequestMapping("/add")
@ResponseBody
@PermissionLimit(adminuser = true)
public ReturnT<String> add(XxlJobUser xxlJobUser) {
// valid username
if (!StringUtils.hasText(xxlJobUser.getUsername())) {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("system_please_input")+I18nUtil.getString("user_username") );
}
xxlJobUser.setUsername(xxlJobUser.getUsername().trim());
if (!(xxlJobUser.getUsername().length()>=4 && xxlJobUser.getUsername().length()<=20)) {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("system_lengh_limit")+"[4-20]" );
}
// valid password
if (!StringUtils.hasText(xxlJobUser.getPassword())) {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("system_please_input")+I18nUtil.getString("user_password") );
}
xxlJobUser.setPassword(xxlJobUser.getPassword().trim());
if (!(xxlJobUser.getPassword().length()>=4 && xxlJobUser.getPassword().length()<=20)) {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("system_lengh_limit")+"[4-20]" );
}
// md5 password
xxlJobUser.setPassword(DigestUtils.md5DigestAsHex(xxlJobUser.getPassword().getBytes()));
// check repeat
XxlJobUser existUser = xxlJobUserDao.loadByUserName(xxlJobUser.getUsername());
if (existUser != null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("user_username_repeat") );
}
// write
xxlJobUserDao.save(xxlJobUser);
return ReturnT.SUCCESS;
}
@RequestMapping("/update")
@ResponseBody
@PermissionLimit(adminuser = true)
public ReturnT<String> update(HttpServletRequest request, XxlJobUser xxlJobUser) {
// avoid opt login seft
XxlJobUser loginUser = PermissionInterceptor.getLoginUser(request);
if (loginUser.getUsername().equals(xxlJobUser.getUsername())) {
return new ReturnT<String>(ReturnT.FAIL.getCode(), I18nUtil.getString("user_update_loginuser_limit"));
}
// valid password
if (StringUtils.hasText(xxlJobUser.getPassword())) {
xxlJobUser.setPassword(xxlJobUser.getPassword().trim());
if (!(xxlJobUser.getPassword().length()>=4 && xxlJobUser.getPassword().length()<=20)) {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("system_lengh_limit")+"[4-20]" );
}
// md5 password
xxlJobUser.setPassword(DigestUtils.md5DigestAsHex(xxlJobUser.getPassword().getBytes()));
} else {
xxlJobUser.setPassword(null);
}
// write
xxlJobUserDao.update(xxlJobUser);
return ReturnT.SUCCESS;
}
@RequestMapping("/remove")
@ResponseBody
@PermissionLimit(adminuser = true)
public ReturnT<String> remove(HttpServletRequest request, int id) {
// avoid opt login seft
XxlJobUser loginUser = PermissionInterceptor.getLoginUser(request);
if (loginUser.getId() == id) {
return new ReturnT<String>(ReturnT.FAIL.getCode(), I18nUtil.getString("user_update_loginuser_limit"));
}
xxlJobUserDao.delete(id);
return ReturnT.SUCCESS;
}
@RequestMapping("/updatePwd")
@ResponseBody
public ReturnT<String> updatePwd(HttpServletRequest request, String password, String oldPassword){
// valid
if (oldPassword==null || oldPassword.trim().length()==0){
return new ReturnT<String>(ReturnT.FAIL.getCode(), I18nUtil.getString("system_please_input") + I18nUtil.getString("change_pwd_field_oldpwd"));
}
if (password==null || password.trim().length()==0){
return new ReturnT<String>(ReturnT.FAIL.getCode(), I18nUtil.getString("system_please_input") + I18nUtil.getString("change_pwd_field_oldpwd"));
}
password = password.trim();
if (!(password.length()>=4 && password.length()<=20)) {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("system_lengh_limit")+"[4-20]" );
}
// md5 password
String md5OldPassword = DigestUtils.md5DigestAsHex(oldPassword.getBytes());
String md5Password = DigestUtils.md5DigestAsHex(password.getBytes());
// valid old pwd
XxlJobUser loginUser = PermissionInterceptor.getLoginUser(request);
XxlJobUser existUser = xxlJobUserDao.loadByUserName(loginUser.getUsername());
if (!md5OldPassword.equals(existUser.getPassword())) {
return new ReturnT<String>(ReturnT.FAIL.getCode(), I18nUtil.getString("change_pwd_field_oldpwd") + I18nUtil.getString("system_unvalid"));
}
// write new
existUser.setPassword(md5Password);
xxlJobUserDao.update(existUser);
return ReturnT.SUCCESS;
}
}

View File

@ -1,29 +0,0 @@
package com.xxl.job.admin.controller.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
*
* @author xuxueli 2015-12-12 18:29:02
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface PermissionLimit {
/**
* whether the login check applies (enforced by default)
*/
boolean limit() default true;
/**
* whether admin privileges (role = 1) are required
*
* @return
*/
boolean adminuser() default false;
}

View File

@ -1,42 +0,0 @@
package com.xxl.job.admin.controller.interceptor;
import com.xxl.job.admin.core.util.FtlUtil;
import com.xxl.job.admin.core.util.I18nUtil;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.AsyncHandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.HashMap;
/**
* push cookies to model as cookieMap
*
* @author xuxueli 2015-12-12 18:09:04
*/
@Component
public class CookieInterceptor implements AsyncHandlerInterceptor {
@Override
public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler,
ModelAndView modelAndView) throws Exception {
// cookie
if (modelAndView!=null && request.getCookies()!=null && request.getCookies().length>0) {
HashMap<String, Cookie> cookieMap = new HashMap<String, Cookie>();
for (Cookie ck : request.getCookies()) {
cookieMap.put(ck.getName(), ck);
}
modelAndView.addObject("cookieMap", cookieMap);
}
// static method
if (modelAndView != null) {
modelAndView.addObject("I18nUtil", FtlUtil.generateStaticModel(I18nUtil.class.getName()));
}
}
}

View File

@ -1,122 +0,0 @@
package com.xxl.job.admin.controller.interceptor;
import com.xxl.job.admin.controller.annotation.PermissionLimit;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobUser;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.service.impl.LoginService;
import org.springframework.stereotype.Component;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.servlet.AsyncHandlerInterceptor;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
*
*
* @author xuxueli 2015-12-12 18:09:04
*/
@Component
public class PermissionInterceptor implements AsyncHandlerInterceptor {
@Resource
private LoginService loginService;
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
if (!(handler instanceof HandlerMethod)) {
// proceed with the next interceptor
return true;
}
// if need login
boolean needLogin = true;
boolean needAdminuser = false;
HandlerMethod method = (HandlerMethod)handler;
PermissionLimit permission = method.getMethodAnnotation(PermissionLimit.class);
if (permission!=null) {
needLogin = permission.limit();
needAdminuser = permission.adminuser();
}
if (needLogin) {
XxlJobUser loginUser = loginService.ifLogin(request, response);
if (loginUser == null) {
response.setStatus(302);
response.setHeader("location", request.getContextPath()+"/toLogin");
return false;
}
if (needAdminuser && loginUser.getRole()!=1) {
throw new RuntimeException(I18nUtil.getString("system_permission_limit"));
}
// set loginUser, with request
request.setAttribute(LoginService.LOGIN_IDENTITY_KEY, loginUser);
}
// proceed with the next interceptor
return true;
}
// -------------------- permission tool --------------------
/**
* get loginUser
*
* @param request
* @return
*/
public static XxlJobUser getLoginUser(HttpServletRequest request){
// get loginUser, with request
return (XxlJobUser) request.getAttribute(LoginService.LOGIN_IDENTITY_KEY);
}
/**
* valid permission by JobGroup
*
* @param request
* @param jobGroup
*/
public static void validJobGroupPermission(HttpServletRequest request, int jobGroup) {
XxlJobUser loginUser = getLoginUser(request);
if (!loginUser.validPermission(jobGroup)) {
throw new RuntimeException(I18nUtil.getString("system_permission_limit") + "[username="+ loginUser.getUsername() +"]");
}
}
/**
* filter XxlJobGroup by role
*
* @param request
* @param jobGroupList_all
* @return
*/
public static List<XxlJobGroup> filterJobGroupByRole(HttpServletRequest request, List<XxlJobGroup> jobGroupList_all){
List<XxlJobGroup> jobGroupList = new ArrayList<>();
if (jobGroupList_all!=null && !jobGroupList_all.isEmpty()) {
XxlJobUser loginUser = PermissionInterceptor.getLoginUser(request);
if (loginUser.getRole() == 1) {
jobGroupList = jobGroupList_all;
} else {
List<String> groupIdStrs = new ArrayList<>();
if (loginUser.getPermission()!=null && !loginUser.getPermission().trim().isEmpty()) {
groupIdStrs = Arrays.asList(loginUser.getPermission().trim().split(","));
}
for (XxlJobGroup groupItem:jobGroupList_all) {
if (groupIdStrs.contains(String.valueOf(groupItem.getId()))) {
jobGroupList.add(groupItem);
}
}
}
}
return jobGroupList;
}
}

View File

@ -1,28 +0,0 @@
package com.xxl.job.admin.controller.interceptor;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import javax.annotation.Resource;
/**
* web mvc config
*
* @author xuxueli 2018-04-02 20:48:20
*/
@Configuration
public class WebMvcConfig implements WebMvcConfigurer {
@Resource
private PermissionInterceptor permissionInterceptor;
@Resource
private CookieInterceptor cookieInterceptor;
@Override
public void addInterceptors(InterceptorRegistry registry) {
registry.addInterceptor(permissionInterceptor).addPathPatterns("/**");
registry.addInterceptor(cookieInterceptor).addPathPatterns("/**");
}
}

View File

@ -1,66 +0,0 @@
package com.xxl.job.admin.controller.resolver;
import com.xxl.job.admin.core.exception.XxlJobException;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.admin.core.util.JacksonUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.servlet.HandlerExceptionResolver;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* common exception resolver
*
* @author xuxueli 2016-1-6 19:22:18
*/
@Component
public class WebExceptionResolver implements HandlerExceptionResolver {
private static transient Logger logger = LoggerFactory.getLogger(WebExceptionResolver.class);
@Override
public ModelAndView resolveException(HttpServletRequest request,
HttpServletResponse response, Object handler, Exception ex) {
if (!(ex instanceof XxlJobException)) {
logger.error("WebExceptionResolver:{}", ex);
}
// if json
boolean isJson = false;
if (handler instanceof HandlerMethod) {
HandlerMethod method = (HandlerMethod)handler;
ResponseBody responseBody = method.getMethodAnnotation(ResponseBody.class);
if (responseBody != null) {
isJson = true;
}
}
// error result
ReturnT<String> errorResult = new ReturnT<String>(ReturnT.FAIL_CODE, ex.toString().replaceAll("\n", "<br/>"));
// response
ModelAndView mv = new ModelAndView();
if (isJson) {
try {
response.setContentType("application/json;charset=utf-8");
response.getWriter().print(JacksonUtil.writeValueAsString(errorResult));
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return mv;
} else {
mv.addObject("exceptionMsg", errorResult.getMsg());
mv.setViewName("/common/common.exception");
return mv;
}
}
}

View File

@ -1,20 +0,0 @@
package com.xxl.job.admin.core.alarm;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog;
/**
* @author xuxueli 2020-01-19
*/
public interface JobAlarm {
/**
* job alarm
*
* @param info
* @param jobLog
* @return
*/
public boolean doAlarm(XxlJobInfo info, XxlJobLog jobLog);
}

View File

@ -1,66 +0,0 @@
package com.xxl.job.admin.core.alarm;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Component
public class JobAlarmer implements ApplicationContextAware, InitializingBean {
private static Logger logger = LoggerFactory.getLogger(JobAlarmer.class);
private ApplicationContext applicationContext;
private List<JobAlarm> jobAlarmList;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
@Override
public void afterPropertiesSet() throws Exception {
Map<String, JobAlarm> serviceBeanMap = applicationContext.getBeansOfType(JobAlarm.class);
if (serviceBeanMap != null && serviceBeanMap.size() > 0) {
jobAlarmList = new ArrayList<JobAlarm>(serviceBeanMap.values());
}
}
/**
* job alarm
*
* @param info
* @param jobLog
* @return
*/
public boolean alarm(XxlJobInfo info, XxlJobLog jobLog) {
boolean result = false;
if (jobAlarmList!=null && !jobAlarmList.isEmpty()) {
// success means all-success
result = true;
for (JobAlarm alarm: jobAlarmList) {
boolean resultItem = false;
try {
resultItem = alarm.doAlarm(info, jobLog);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
if (!resultItem) {
result = false;
}
}
}
return result;
}
}

View File

@ -1,118 +0,0 @@
package com.xxl.job.admin.core.alarm.impl;
import com.xxl.job.admin.core.alarm.JobAlarm;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.core.biz.model.ReturnT;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.stereotype.Component;
import javax.mail.internet.MimeMessage;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
/**
* job alarm by email
*
* @author xuxueli 2020-01-19
*/
@Component
public class EmailJobAlarm implements JobAlarm {
private static Logger logger = LoggerFactory.getLogger(EmailJobAlarm.class);
/**
* fail alarm
*
* @param jobLog
*/
@Override
public boolean doAlarm(XxlJobInfo info, XxlJobLog jobLog){
boolean alarmResult = true;
// send monitor email
if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) {
// alarmContent
String alarmContent = "Alarm Job LogId=" + jobLog.getId();
if (jobLog.getTriggerCode() != ReturnT.SUCCESS_CODE) {
alarmContent += "<br>TriggerMsg=<br>" + jobLog.getTriggerMsg();
}
if (jobLog.getHandleCode()>0 && jobLog.getHandleCode() != ReturnT.SUCCESS_CODE) {
alarmContent += "<br>HandleCode=" + jobLog.getHandleMsg();
}
// email info
XxlJobGroup group = XxlJobAdminConfig.getAdminConfig().getXxlJobGroupDao().load(Integer.valueOf(info.getJobGroup()));
String personal = I18nUtil.getString("admin_name_full");
String title = I18nUtil.getString("jobconf_monitor");
String content = MessageFormat.format(loadEmailJobAlarmTemplate(),
group!=null?group.getTitle():"null",
info.getId(),
info.getJobDesc(),
alarmContent);
Set<String> emailSet = new HashSet<String>(Arrays.asList(info.getAlarmEmail().split(",")));
for (String email: emailSet) {
// make mail
try {
MimeMessage mimeMessage = XxlJobAdminConfig.getAdminConfig().getMailSender().createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true);
helper.setFrom(XxlJobAdminConfig.getAdminConfig().getEmailFrom(), personal);
helper.setTo(email);
helper.setSubject(title);
helper.setText(content, true);
XxlJobAdminConfig.getAdminConfig().getMailSender().send(mimeMessage);
} catch (Exception e) {
logger.error(">>>>>>>>>>> xxl-job, job fail alarm email send error, JobLogId:{}", jobLog.getId(), e);
alarmResult = false;
}
}
}
return alarmResult;
}
/**
* load email job alarm template
*
* @return
*/
private static final String loadEmailJobAlarmTemplate(){
String mailBodyTemplate = "<h5>" + I18nUtil.getString("jobconf_monitor_detail") + "</span>" +
"<table border=\"1\" cellpadding=\"3\" style=\"border-collapse:collapse; width:80%;\" >\n" +
" <thead style=\"font-weight: bold;color: #ffffff;background-color: #ff8c00;\" >" +
" <tr>\n" +
" <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobgroup") +"</td>\n" +
" <td width=\"10%\" >"+ I18nUtil.getString("jobinfo_field_id") +"</td>\n" +
" <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobdesc") +"</td>\n" +
" <td width=\"10%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_title") +"</td>\n" +
" <td width=\"40%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_content") +"</td>\n" +
" </tr>\n" +
" </thead>\n" +
" <tbody>\n" +
" <tr>\n" +
" <td>{0}</td>\n" +
" <td>{1}</td>\n" +
" <td>{2}</td>\n" +
" <td>"+ I18nUtil.getString("jobconf_monitor_alarm_type") +"</td>\n" +
" <td>{3}</td>\n" +
" </tr>\n" +
" </tbody>\n" +
"</table>";
return mailBodyTemplate;
}
}
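
The alarm body above is assembled with java.text.MessageFormat: the HTML template returned by loadEmailJobAlarmTemplate() carries {0}..{3} slots that are filled with the group title, job id, job description and alarm content. A minimal, hypothetical sketch of that mechanism (simplified template and sample values, not taken from the removed code):

import java.text.MessageFormat;

public class AlarmTemplateSketch {
    public static void main(String[] args) {
        // simplified stand-in for the HTML table row built by the removed template method
        String template = "<tr><td>{0}</td><td>{1}</td><td>{2}</td><td>{3}</td></tr>";
        String row = MessageFormat.format(template,
                "demo-executor",          // {0} job group title (illustrative value)
                17,                       // {1} job id
                "nightly-report",         // {2} job description
                "Alarm Job LogId=1001");  // {3} alarm content
        System.out.println(row);          // <tr><td>demo-executor</td><td>17</td>...
    }
}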

View File

@ -1,99 +0,0 @@
package com.xxl.job.admin.core.complete;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog;
import com.xxl.job.admin.core.thread.JobTriggerPoolHelper;
import com.xxl.job.admin.core.trigger.TriggerTypeEnum;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.context.XxlJobContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.MessageFormat;
/**
* @author xuxueli 2020-10-30 20:43:10
*/
public class XxlJobCompleter {
private static Logger logger = LoggerFactory.getLogger(XxlJobCompleter.class);
/**
* common fresh handle entrance (limit only once)
*
* @param xxlJobLog
* @return
*/
public static int updateHandleInfoAndFinish(XxlJobLog xxlJobLog) {
// finish
finishJob(xxlJobLog);
// handleMsg column is TEXT (max 64KB); truncate to avoid an overly long message
if (xxlJobLog.getHandleMsg().length() > 15000) {
xxlJobLog.setHandleMsg( xxlJobLog.getHandleMsg().substring(0, 15000) );
}
// fresh handle
return XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().updateHandleInfo(xxlJobLog);
}
/**
* do something to finish job
*/
private static void finishJob(XxlJobLog xxlJobLog){
// 1、handle success, to trigger child job
String triggerChildMsg = null;
if (XxlJobContext.HANDLE_CODE_SUCCESS == xxlJobLog.getHandleCode()) {
XxlJobInfo xxlJobInfo = XxlJobAdminConfig.getAdminConfig().getXxlJobInfoDao().loadById(xxlJobLog.getJobId());
if (xxlJobInfo!=null && xxlJobInfo.getChildJobId()!=null && xxlJobInfo.getChildJobId().trim().length()>0) {
triggerChildMsg = "<br><br><span style=\"color:#00c0ef;\" > >>>>>>>>>>>"+ I18nUtil.getString("jobconf_trigger_child_run") +"<<<<<<<<<<< </span><br>";
String[] childJobIds = xxlJobInfo.getChildJobId().split(",");
for (int i = 0; i < childJobIds.length; i++) {
int childJobId = (childJobIds[i]!=null && childJobIds[i].trim().length()>0 && isNumeric(childJobIds[i]))?Integer.valueOf(childJobIds[i]):-1;
if (childJobId > 0) {
JobTriggerPoolHelper.trigger(childJobId, TriggerTypeEnum.PARENT, -1, null, null, null);
ReturnT<String> triggerChildResult = ReturnT.SUCCESS;
// add msg
triggerChildMsg += MessageFormat.format(I18nUtil.getString("jobconf_callback_child_msg1"),
(i+1),
childJobIds.length,
childJobIds[i],
(triggerChildResult.getCode()==ReturnT.SUCCESS_CODE?I18nUtil.getString("system_success"):I18nUtil.getString("system_fail")),
triggerChildResult.getMsg());
} else {
triggerChildMsg += MessageFormat.format(I18nUtil.getString("jobconf_callback_child_msg2"),
(i+1),
childJobIds.length,
childJobIds[i]);
}
}
}
}
if (triggerChildMsg != null) {
xxlJobLog.setHandleMsg( xxlJobLog.getHandleMsg() + triggerChildMsg );
}
// 2、fix_delay trigger next
// on the way
}
private static boolean isNumeric(String str){
try {
Integer.parseInt(str);
return true;
} catch (NumberFormatException e) {
return false;
}
}
}

View File

@ -1,159 +0,0 @@
package com.xxl.job.admin.core.conf;
import com.xxl.job.admin.core.alarm.JobAlarmer;
import com.xxl.job.admin.core.scheduler.XxlJobScheduler;
import com.xxl.job.admin.dao.*;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import javax.sql.DataSource;
import java.util.Arrays;
/**
* xxl-job config
*
* @author xuxueli 2017-04-28
*/
@Component
public class XxlJobAdminConfig implements InitializingBean, DisposableBean {
private static XxlJobAdminConfig adminConfig = null;
public static XxlJobAdminConfig getAdminConfig() {
return adminConfig;
}
// ---------------------- XxlJobScheduler ----------------------
private XxlJobScheduler xxlJobScheduler;
@Override
public void afterPropertiesSet() throws Exception {
adminConfig = this;
xxlJobScheduler = new XxlJobScheduler();
xxlJobScheduler.init();
}
@Override
public void destroy() throws Exception {
xxlJobScheduler.destroy();
}
// ---------------------- XxlJobScheduler ----------------------
// conf
@Value("${xxl.job.i18n}")
private String i18n;
@Value("${xxl.job.accessToken}")
private String accessToken;
@Value("${spring.mail.from}")
private String emailFrom;
@Value("${xxl.job.triggerpool.fast.max}")
private int triggerPoolFastMax;
@Value("${xxl.job.triggerpool.slow.max}")
private int triggerPoolSlowMax;
@Value("${xxl.job.logretentiondays}")
private int logretentiondays;
// dao, service
@Resource
private XxlJobLogDao xxlJobLogDao;
@Resource
private XxlJobInfoDao xxlJobInfoDao;
@Resource
private XxlJobRegistryDao xxlJobRegistryDao;
@Resource
private XxlJobGroupDao xxlJobGroupDao;
@Resource
private XxlJobLogReportDao xxlJobLogReportDao;
@Resource
private JavaMailSender mailSender;
@Resource
private DataSource dataSource;
@Resource
private JobAlarmer jobAlarmer;
public String getI18n() {
if (!Arrays.asList("zh_CN", "zh_TC", "en").contains(i18n)) {
return "zh_CN";
}
return i18n;
}
public String getAccessToken() {
return accessToken;
}
public String getEmailFrom() {
return emailFrom;
}
public int getTriggerPoolFastMax() {
if (triggerPoolFastMax < 200) {
return 200;
}
return triggerPoolFastMax;
}
public int getTriggerPoolSlowMax() {
if (triggerPoolSlowMax < 100) {
return 100;
}
return triggerPoolSlowMax;
}
public int getLogretentiondays() {
if (logretentiondays < 7) {
// must be at least 7; any smaller value disables log cleanup (returns -1)
return -1;
}
return logretentiondays;
}
public XxlJobLogDao getXxlJobLogDao() {
return xxlJobLogDao;
}
public XxlJobInfoDao getXxlJobInfoDao() {
return xxlJobInfoDao;
}
public XxlJobRegistryDao getXxlJobRegistryDao() {
return xxlJobRegistryDao;
}
public XxlJobGroupDao getXxlJobGroupDao() {
return xxlJobGroupDao;
}
public XxlJobLogReportDao getXxlJobLogReportDao() {
return xxlJobLogReportDao;
}
public JavaMailSender getMailSender() {
return mailSender;
}
public DataSource getDataSource() {
return dataSource;
}
public JobAlarmer getJobAlarmer() {
return jobAlarmer;
}
}

View File

@ -1,14 +0,0 @@
package com.xxl.job.admin.core.exception;
/**
* @author xuxueli 2019-05-04 23:19:29
*/
public class XxlJobException extends RuntimeException {
public XxlJobException() {
}
public XxlJobException(String message) {
super(message);
}
}

View File

@ -1,79 +0,0 @@
package com.xxl.job.admin.core.model;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
/**
* Created by xuxueli on 16/9/30.
*/
public class XxlJobGroup {
private int id;
private String appname;
private String title;
// executor address type: 0 = auto-registered, 1 = manually entered
private int addressType;
// executor address list, multiple addresses separated by commas (manual entry)
private String addressList;
private Date updateTime;
// registry list: executor addresses registered automatically by the system
private List<String> registryList;
public List<String> getRegistryList() {
if (addressList!=null && addressList.trim().length()>0) {
registryList = new ArrayList<String>(Arrays.asList(addressList.split(",")));
}
return registryList;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getAppname() {
return appname;
}
public void setAppname(String appname) {
this.appname = appname;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public int getAddressType() {
return addressType;
}
public void setAddressType(int addressType) {
this.addressType = addressType;
}
public String getAddressList() {
return addressList;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
public void setAddressList(String addressList) {
this.addressList = addressList;
}
}

View File

@ -1,255 +0,0 @@
package com.xxl.job.admin.core.model;
import java.util.Date;
/**
* xxl-job info
*
* @author xuxueli 2016-1-12 18:25:49
*/
public class XxlJobInfo {
// primary key ID
private int id;
// job group (executor) primary key ID
private int jobGroup;
private String jobDesc;
private Date addTime;
private Date updateTime;
// owner
private String author;
// alarm email
private String alarmEmail;
// schedule type
private String scheduleType;
// schedule config; the meaning of the value depends on the schedule type
private String scheduleConf;
// misfire strategy
private String misfireStrategy;
// executor route strategy
private String executorRouteStrategy;
// executor job handler name
private String executorHandler;
// executor job parameters
private String executorParam;
// block strategy
private String executorBlockStrategy;
// job execution timeout, in seconds
private int executorTimeout;
// fail retry count
private int executorFailRetryCount;
// GLUE type #com.xxl.job.core.glue.GlueTypeEnum
private String glueType;
// GLUE source code
private String glueSource;
// GLUE remark
private String glueRemark;
// GLUE update time
private Date glueUpdatetime;
// child job IDs, multiple IDs separated by commas
private String childJobId;
// trigger status: 0 = stopped, 1 = running
private int triggerStatus;
// last trigger time
private long triggerLastTime;
// next trigger time
private long triggerNextTime;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getJobGroup() {
return jobGroup;
}
public void setJobGroup(int jobGroup) {
this.jobGroup = jobGroup;
}
public String getJobDesc() {
return jobDesc;
}
public void setJobDesc(String jobDesc) {
this.jobDesc = jobDesc;
}
public Date getAddTime() {
return addTime;
}
public void setAddTime(Date addTime) {
this.addTime = addTime;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
public String getAuthor() {
return author;
}
public void setAuthor(String author) {
this.author = author;
}
public String getAlarmEmail() {
return alarmEmail;
}
public void setAlarmEmail(String alarmEmail) {
this.alarmEmail = alarmEmail;
}
public String getScheduleType() {
return scheduleType;
}
public void setScheduleType(String scheduleType) {
this.scheduleType = scheduleType;
}
public String getScheduleConf() {
return scheduleConf;
}
public void setScheduleConf(String scheduleConf) {
this.scheduleConf = scheduleConf;
}
public String getMisfireStrategy() {
return misfireStrategy;
}
public void setMisfireStrategy(String misfireStrategy) {
this.misfireStrategy = misfireStrategy;
}
public String getExecutorRouteStrategy() {
return executorRouteStrategy;
}
public void setExecutorRouteStrategy(String executorRouteStrategy) {
this.executorRouteStrategy = executorRouteStrategy;
}
public String getExecutorHandler() {
return executorHandler;
}
public void setExecutorHandler(String executorHandler) {
this.executorHandler = executorHandler;
}
public String getExecutorParam() {
return executorParam;
}
public void setExecutorParam(String executorParam) {
this.executorParam = executorParam;
}
public String getExecutorBlockStrategy() {
return executorBlockStrategy;
}
public void setExecutorBlockStrategy(String executorBlockStrategy) {
this.executorBlockStrategy = executorBlockStrategy;
}
public int getExecutorTimeout() {
return executorTimeout;
}
public void setExecutorTimeout(int executorTimeout) {
this.executorTimeout = executorTimeout;
}
public int getExecutorFailRetryCount() {
return executorFailRetryCount;
}
public void setExecutorFailRetryCount(int executorFailRetryCount) {
this.executorFailRetryCount = executorFailRetryCount;
}
public String getGlueType() {
return glueType;
}
public void setGlueType(String glueType) {
this.glueType = glueType;
}
public String getGlueSource() {
return glueSource;
}
public void setGlueSource(String glueSource) {
this.glueSource = glueSource;
}
public String getGlueRemark() {
return glueRemark;
}
public void setGlueRemark(String glueRemark) {
this.glueRemark = glueRemark;
}
public Date getGlueUpdatetime() {
return glueUpdatetime;
}
public void setGlueUpdatetime(Date glueUpdatetime) {
this.glueUpdatetime = glueUpdatetime;
}
public String getChildJobId() {
return childJobId;
}
public void setChildJobId(String childJobId) {
this.childJobId = childJobId;
}
public int getTriggerStatus() {
return triggerStatus;
}
public void setTriggerStatus(int triggerStatus) {
this.triggerStatus = triggerStatus;
}
public long getTriggerLastTime() {
return triggerLastTime;
}
public void setTriggerLastTime(long triggerLastTime) {
this.triggerLastTime = triggerLastTime;
}
public long getTriggerNextTime() {
return triggerNextTime;
}
public void setTriggerNextTime(long triggerNextTime) {
this.triggerNextTime = triggerNextTime;
}
}

View File

@ -1,157 +0,0 @@
package com.xxl.job.admin.core.model;
import java.util.Date;
/**
* xxl-job log, used to track trigger process
* @author xuxueli 2015-12-19 23:19:09
*/
public class XxlJobLog {
private long id;
// job info
private int jobGroup;
private int jobId;
// execute info
private String executorAddress;
private String executorHandler;
private String executorParam;
private String executorShardingParam;
private int executorFailRetryCount;
// trigger info
private Date triggerTime;
private int triggerCode;
private String triggerMsg;
// handle info
private Date handleTime;
private int handleCode;
private String handleMsg;
// alarm info
private int alarmStatus;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public int getJobGroup() {
return jobGroup;
}
public void setJobGroup(int jobGroup) {
this.jobGroup = jobGroup;
}
public int getJobId() {
return jobId;
}
public void setJobId(int jobId) {
this.jobId = jobId;
}
public String getExecutorAddress() {
return executorAddress;
}
public void setExecutorAddress(String executorAddress) {
this.executorAddress = executorAddress;
}
public String getExecutorHandler() {
return executorHandler;
}
public void setExecutorHandler(String executorHandler) {
this.executorHandler = executorHandler;
}
public String getExecutorParam() {
return executorParam;
}
public void setExecutorParam(String executorParam) {
this.executorParam = executorParam;
}
public String getExecutorShardingParam() {
return executorShardingParam;
}
public void setExecutorShardingParam(String executorShardingParam) {
this.executorShardingParam = executorShardingParam;
}
public int getExecutorFailRetryCount() {
return executorFailRetryCount;
}
public void setExecutorFailRetryCount(int executorFailRetryCount) {
this.executorFailRetryCount = executorFailRetryCount;
}
public Date getTriggerTime() {
return triggerTime;
}
public void setTriggerTime(Date triggerTime) {
this.triggerTime = triggerTime;
}
public int getTriggerCode() {
return triggerCode;
}
public void setTriggerCode(int triggerCode) {
this.triggerCode = triggerCode;
}
public String getTriggerMsg() {
return triggerMsg;
}
public void setTriggerMsg(String triggerMsg) {
this.triggerMsg = triggerMsg;
}
public Date getHandleTime() {
return handleTime;
}
public void setHandleTime(Date handleTime) {
this.handleTime = handleTime;
}
public int getHandleCode() {
return handleCode;
}
public void setHandleCode(int handleCode) {
this.handleCode = handleCode;
}
public String getHandleMsg() {
return handleMsg;
}
public void setHandleMsg(String handleMsg) {
this.handleMsg = handleMsg;
}
public int getAlarmStatus() {
return alarmStatus;
}
public void setAlarmStatus(int alarmStatus) {
this.alarmStatus = alarmStatus;
}
}

View File

@ -1,77 +0,0 @@
package com.xxl.job.admin.core.model;
import java.util.Date;
/**
* xxl-job log for glue, used to track job code process
* @author xuxueli 2016-5-19 17:57:46
*/
public class XxlJobLogGlue {
private int id;
// job primary key ID
private int jobId;
// GLUE type #com.xxl.job.core.glue.GlueTypeEnum
private String glueType;
private String glueSource;
private String glueRemark;
private Date addTime;
private Date updateTime;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getJobId() {
return jobId;
}
public void setJobId(int jobId) {
this.jobId = jobId;
}
public String getGlueType() {
return glueType;
}
public void setGlueType(String glueType) {
this.glueType = glueType;
}
public String getGlueSource() {
return glueSource;
}
public void setGlueSource(String glueSource) {
this.glueSource = glueSource;
}
public String getGlueRemark() {
return glueRemark;
}
public void setGlueRemark(String glueRemark) {
this.glueRemark = glueRemark;
}
public Date getAddTime() {
return addTime;
}
public void setAddTime(Date addTime) {
this.addTime = addTime;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
}

View File

@ -1,54 +0,0 @@
package com.xxl.job.admin.core.model;
import java.util.Date;
public class XxlJobLogReport {
private int id;
private Date triggerDay;
private int runningCount;
private int sucCount;
private int failCount;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public Date getTriggerDay() {
return triggerDay;
}
public void setTriggerDay(Date triggerDay) {
this.triggerDay = triggerDay;
}
public int getRunningCount() {
return runningCount;
}
public void setRunningCount(int runningCount) {
this.runningCount = runningCount;
}
public int getSucCount() {
return sucCount;
}
public void setSucCount(int sucCount) {
this.sucCount = sucCount;
}
public int getFailCount() {
return failCount;
}
public void setFailCount(int failCount) {
this.failCount = failCount;
}
}

View File

@ -1,55 +0,0 @@
package com.xxl.job.admin.core.model;
import java.util.Date;
/**
* Created by xuxueli on 16/9/30.
*/
public class XxlJobRegistry {
private int id;
private String registryGroup;
private String registryKey;
private String registryValue;
private Date updateTime;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getRegistryGroup() {
return registryGroup;
}
public void setRegistryGroup(String registryGroup) {
this.registryGroup = registryGroup;
}
public String getRegistryKey() {
return registryKey;
}
public void setRegistryKey(String registryKey) {
this.registryKey = registryKey;
}
public String getRegistryValue() {
return registryValue;
}
public void setRegistryValue(String registryValue) {
this.registryValue = registryValue;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
}

View File

@ -1,77 +0,0 @@
package com.xxl.job.admin.core.model;
import org.springframework.util.StringUtils;
/**
* @author xuxueli 2019-05-04 16:43:12
*/
public class XxlJobUser {
private int id;
// account name
private String username;
// password
private String password;
// role: 0 = ordinary user, 1 = administrator
private int role;
// permission: job group (executor) ID list, multiple IDs separated by commas
private String permission;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public int getRole() {
return role;
}
public void setRole(int role) {
this.role = role;
}
public String getPermission() {
return permission;
}
public void setPermission(String permission) {
this.permission = permission;
}
// plugin
public boolean validPermission(int jobGroup){
if (this.role == 1) {
return true;
} else {
if (StringUtils.hasText(this.permission)) {
for (String permissionItem : this.permission.split(",")) {
if (String.valueOf(jobGroup).equals(permissionItem)) {
return true;
}
}
}
return false;
}
}
}
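
validPermission() above is the only access check: administrators (role == 1) pass unconditionally, while ordinary users pass only when the jobGroup id appears in their comma-separated permission string. A minimal sketch of the expected behaviour, assuming it is compiled next to the deleted class (spring-core on the classpath for StringUtils); the values are illustrative:

public class ValidPermissionSketch {
    public static void main(String[] args) {
        XxlJobUser user = new XxlJobUser();
        user.setRole(0);                // ordinary user
        user.setPermission("1,3");      // allowed job groups 1 and 3

        System.out.println(user.validPermission(3));   // true  - listed in permission
        System.out.println(user.validPermission(2));   // false - not listed

        user.setRole(1);                // administrator
        System.out.println(user.validPermission(2));   // true  - admins pass every check
    }
}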

View File

@ -1,32 +0,0 @@
//package com.xxl.job.admin.core.jobbean;
//
//import com.xxl.job.admin.core.thread.JobTriggerPoolHelper;
//import com.xxl.job.admin.core.trigger.TriggerTypeEnum;
//import org.quartz.JobExecutionContext;
//import org.quartz.JobExecutionException;
//import org.quartz.JobKey;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
//import org.springframework.scheduling.quartz.QuartzJobBean;
//
///**
// * http job bean
// * “@DisallowConcurrentExecution” disable concurrent, thread size can not be only one, better given more
// * @author xuxueli 2015-12-17 18:20:34
// */
////@DisallowConcurrentExecution
//public class RemoteHttpJobBean extends QuartzJobBean {
// private static Logger logger = LoggerFactory.getLogger(RemoteHttpJobBean.class);
//
// @Override
// protected void executeInternal(JobExecutionContext context)
// throws JobExecutionException {
//
// // load jobId
// JobKey jobKey = context.getTrigger().getJobKey();
// Integer jobId = Integer.valueOf(jobKey.getName());
//
//
// }
//
//}

View File

@ -1,413 +0,0 @@
//package com.xxl.job.admin.core.schedule;
//
//import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
//import com.xxl.job.admin.core.jobbean.RemoteHttpJobBean;
//import com.xxl.job.admin.core.model.XxlJobInfo;
//import com.xxl.job.admin.core.thread.JobFailMonitorHelper;
//import com.xxl.job.admin.core.thread.JobRegistryMonitorHelper;
//import com.xxl.job.admin.core.thread.JobTriggerPoolHelper;
//import com.xxl.job.admin.core.util.I18nUtil;
//import com.xxl.job.core.biz.AdminBiz;
//import com.xxl.job.core.biz.ExecutorBiz;
//import com.xxl.job.core.enums.ExecutorBlockStrategyEnum;
//import com.xxl.rpc.remoting.invoker.XxlRpcInvokerFactory;
//import com.xxl.rpc.remoting.invoker.call.CallType;
//import com.xxl.rpc.remoting.invoker.reference.XxlRpcReferenceBean;
//import com.xxl.rpc.remoting.invoker.route.LoadBalance;
//import com.xxl.rpc.remoting.net.NetEnum;
//import com.xxl.rpc.remoting.net.impl.servlet.server.ServletServerHandler;
//import com.xxl.rpc.remoting.provider.XxlRpcProviderFactory;
//import com.xxl.rpc.serialize.Serializer;
//import org.quartz.*;
//import org.quartz.Trigger.TriggerState;
//import org.quartz.impl.triggers.CronTriggerImpl;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
//import org.springframework.util.Assert;
//
//import javax.servlet.ServletException;
//import javax.servlet.http.HttpServletRequest;
//import javax.servlet.http.HttpServletResponse;
//import java.io.IOException;
//import java.util.Date;
//import java.util.concurrent.ConcurrentHashMap;
//
///**
// * base quartz scheduler util
// * @author xuxueli 2015-12-19 16:13:53
// */
//public final class XxlJobDynamicScheduler {
// private static final Logger logger = LoggerFactory.getLogger(XxlJobDynamicScheduler_old.class);
//
// // ---------------------- param ----------------------
//
// // scheduler
// private static Scheduler scheduler;
// public void setScheduler(Scheduler scheduler) {
// XxlJobDynamicScheduler_old.scheduler = scheduler;
// }
//
//
// // ---------------------- init + destroy ----------------------
// public void start() throws Exception {
// // valid
// Assert.notNull(scheduler, "quartz scheduler is null");
//
// // init i18n
// initI18n();
//
// // admin registry monitor run
// JobRegistryMonitorHelper.getInstance().start();
//
// // admin monitor run
// JobFailMonitorHelper.getInstance().start();
//
// // admin-server
// initRpcProvider();
//
// logger.info(">>>>>>>>> init xxl-job admin success.");
// }
//
//
// public void destroy() throws Exception {
// // admin trigger pool stop
// JobTriggerPoolHelper.toStop();
//
// // admin registry stop
// JobRegistryMonitorHelper.getInstance().toStop();
//
// // admin monitor stop
// JobFailMonitorHelper.getInstance().toStop();
//
// // admin-server
// stopRpcProvider();
// }
//
//
// // ---------------------- I18n ----------------------
//
// private void initI18n(){
// for (ExecutorBlockStrategyEnum item:ExecutorBlockStrategyEnum.values()) {
// item.setTitle(I18nUtil.getString("jobconf_block_".concat(item.name())));
// }
// }
//
//
// // ---------------------- admin rpc provider (no server version) ----------------------
// private static ServletServerHandler servletServerHandler;
// private void initRpcProvider(){
// // init
// XxlRpcProviderFactory xxlRpcProviderFactory = new XxlRpcProviderFactory();
// xxlRpcProviderFactory.initConfig(
// NetEnum.NETTY_HTTP,
// Serializer.SerializeEnum.HESSIAN.getSerializer(),
// null,
// 0,
// XxlJobAdminConfig.getAdminConfig().getAccessToken(),
// null,
// null);
//
// // add services
// xxlRpcProviderFactory.addService(AdminBiz.class.getName(), null, XxlJobAdminConfig.getAdminConfig().getAdminBiz());
//
// // servlet handler
// servletServerHandler = new ServletServerHandler(xxlRpcProviderFactory);
// }
// private void stopRpcProvider() throws Exception {
// XxlRpcInvokerFactory.getInstance().stop();
// }
// public static void invokeAdminService(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
// servletServerHandler.handle(null, request, response);
// }
//
//
// // ---------------------- executor-client ----------------------
// private static ConcurrentHashMap<String, ExecutorBiz> executorBizRepository = new ConcurrentHashMap<String, ExecutorBiz>();
// public static ExecutorBiz getExecutorBiz(String address) throws Exception {
// // valid
// if (address==null || address.trim().length()==0) {
// return null;
// }
//
// // load-cache
// address = address.trim();
// ExecutorBiz executorBiz = executorBizRepository.get(address);
// if (executorBiz != null) {
// return executorBiz;
// }
//
// // set-cache
// executorBiz = (ExecutorBiz) new XxlRpcReferenceBean(
// NetEnum.NETTY_HTTP,
// Serializer.SerializeEnum.HESSIAN.getSerializer(),
// CallType.SYNC,
// LoadBalance.ROUND,
// ExecutorBiz.class,
// null,
// 5000,
// address,
// XxlJobAdminConfig.getAdminConfig().getAccessToken(),
// null,
// null).getObject();
//
// executorBizRepository.put(address, executorBiz);
// return executorBiz;
// }
//
//
// // ---------------------- schedule util ----------------------
//
// /**
// * fill job info
// *
// * @param jobInfo
// */
// public static void fillJobInfo(XxlJobInfo jobInfo) {
//
// String name = String.valueOf(jobInfo.getId());
//
// // trigger key
// TriggerKey triggerKey = TriggerKey.triggerKey(name);
// try {
//
// // trigger cron
// Trigger trigger = scheduler.getTrigger(triggerKey);
// if (trigger!=null && trigger instanceof CronTriggerImpl) {
// String cronExpression = ((CronTriggerImpl) trigger).getCronExpression();
// jobInfo.setJobCron(cronExpression);
// }
//
// // trigger state
// TriggerState triggerState = scheduler.getTriggerState(triggerKey);
// if (triggerState!=null) {
// jobInfo.setJobStatus(triggerState.name());
// }
//
// //JobKey jobKey = new JobKey(jobInfo.getJobName(), String.valueOf(jobInfo.getJobGroup()));
// //JobDetail jobDetail = scheduler.getJobDetail(jobKey);
// //String jobClass = jobDetail.getJobClass().getName();
//
// } catch (SchedulerException e) {
// logger.error(e.getMessage(), e);
// }
// }
//
//
// /**
// * add trigger + job
// *
// * @param jobName
// * @param cronExpression
// * @return
// * @throws SchedulerException
// */
// public static boolean addJob(String jobName, String cronExpression) throws SchedulerException {
// // 1、job key
// TriggerKey triggerKey = TriggerKey.triggerKey(jobName);
// JobKey jobKey = new JobKey(jobName);
//
// // 2、valid
// if (scheduler.checkExists(triggerKey)) {
// return true; // PASS
// }
//
// // 3、corn trigger
// CronScheduleBuilder cronScheduleBuilder = CronScheduleBuilder.cronSchedule(cronExpression).withMisfireHandlingInstructionDoNothing(); // withMisfireHandlingInstructionDoNothing: ignore misfired triggers that were skipped while scheduling was stopped
// CronTrigger cronTrigger = TriggerBuilder.newTrigger().withIdentity(triggerKey).withSchedule(cronScheduleBuilder).build();
//
// // 4、job detail
// Class<? extends Job> jobClass_ = RemoteHttpJobBean.class; // Class.forName(jobInfo.getJobClass());
// JobDetail jobDetail = JobBuilder.newJob(jobClass_).withIdentity(jobKey).build();
//
// /*if (jobInfo.getJobData()!=null) {
// JobDataMap jobDataMap = jobDetail.getJobDataMap();
// jobDataMap.putAll(JacksonUtil.readValue(jobInfo.getJobData(), Map.class));
// // JobExecutionContext context.getMergedJobDataMap().get("mailGuid");
// }*/
//
// // 5、schedule job
// Date date = scheduler.scheduleJob(jobDetail, cronTrigger);
//
// logger.info(">>>>>>>>>>> addJob success(quartz), jobDetail:{}, cronTrigger:{}, date:{}", jobDetail, cronTrigger, date);
// return true;
// }
//
//
// /**
// * remove trigger + job
// *
// * @param jobName
// * @return
// * @throws SchedulerException
// */
// public static boolean removeJob(String jobName) throws SchedulerException {
//
// JobKey jobKey = new JobKey(jobName);
// scheduler.deleteJob(jobKey);
//
// /*TriggerKey triggerKey = TriggerKey.triggerKey(jobName);
// if (scheduler.checkExists(triggerKey)) {
// scheduler.unscheduleJob(triggerKey); // trigger + job
// }*/
//
// logger.info(">>>>>>>>>>> removeJob success(quartz), jobKey:{}", jobKey);
// return true;
// }
//
//
// /**
// * updateJobCron
// *
// * @param jobName
// * @param cronExpression
// * @return
// * @throws SchedulerException
// */
// public static boolean updateJobCron(String jobName, String cronExpression) throws SchedulerException {
//
// // 1、job key
// TriggerKey triggerKey = TriggerKey.triggerKey(jobName);
//
// // 2、valid
// if (!scheduler.checkExists(triggerKey)) {
// return true; // PASS
// }
//
// CronTrigger oldTrigger = (CronTrigger) scheduler.getTrigger(triggerKey);
//
// // 3、avoid repeat cron
// String oldCron = oldTrigger.getCronExpression();
// if (oldCron.equals(cronExpression)){
// return true; // PASS
// }
//
// // 4、new cron trigger
// CronScheduleBuilder cronScheduleBuilder = CronScheduleBuilder.cronSchedule(cronExpression).withMisfireHandlingInstructionDoNothing();
// oldTrigger = oldTrigger.getTriggerBuilder().withIdentity(triggerKey).withSchedule(cronScheduleBuilder).build();
//
// // 5、rescheduleJob
// scheduler.rescheduleJob(triggerKey, oldTrigger);
//
// /*
// JobKey jobKey = new JobKey(jobName);
//
// // old job detail
// JobDetail jobDetail = scheduler.getJobDetail(jobKey);
//
// // new trigger
// HashSet<Trigger> triggerSet = new HashSet<Trigger>();
// triggerSet.add(cronTrigger);
// // cover trigger of job detail
// scheduler.scheduleJob(jobDetail, triggerSet, true);*/
//
// logger.info(">>>>>>>>>>> resumeJob success, JobName:{}", jobName);
// return true;
// }
//
//
// /**
// * pause
// *
// * @param jobName
// * @return
// * @throws SchedulerException
// */
// /*public static boolean pauseJob(String jobName) throws SchedulerException {
//
// TriggerKey triggerKey = TriggerKey.triggerKey(jobName);
//
// boolean result = false;
// if (scheduler.checkExists(triggerKey)) {
// scheduler.pauseTrigger(triggerKey);
// result = true;
// }
//
// logger.info(">>>>>>>>>>> pauseJob {}, triggerKey:{}", (result?"success":"fail"),triggerKey);
// return result;
// }*/
//
//
// /**
// * resume
// *
// * @param jobName
// * @return
// * @throws SchedulerException
// */
// /*public static boolean resumeJob(String jobName) throws SchedulerException {
//
// TriggerKey triggerKey = TriggerKey.triggerKey(jobName);
//
// boolean result = false;
// if (scheduler.checkExists(triggerKey)) {
// scheduler.resumeTrigger(triggerKey);
// result = true;
// }
//
// logger.info(">>>>>>>>>>> resumeJob {}, triggerKey:{}", (result?"success":"fail"), triggerKey);
// return result;
// }*/
//
//
// /**
// * run
// *
// * @param jobName
// * @return
// * @throws SchedulerException
// */
// /*public static boolean triggerJob(String jobName) throws SchedulerException {
// // TriggerKey : name + group
// JobKey jobKey = new JobKey(jobName);
// TriggerKey triggerKey = TriggerKey.triggerKey(jobName);
//
// boolean result = false;
// if (scheduler.checkExists(triggerKey)) {
// scheduler.triggerJob(jobKey);
// result = true;
// logger.info(">>>>>>>>>>> runJob success, jobKey:{}", jobKey);
// } else {
// logger.info(">>>>>>>>>>> runJob fail, jobKey:{}", jobKey);
// }
// return result;
// }*/
//
//
// /**
// * finaAllJobList
// *
// * @return
// *//*
// @Deprecated
// public static List<Map<String, Object>> finaAllJobList(){
// List<Map<String, Object>> jobList = new ArrayList<Map<String,Object>>();
//
// try {
// if (scheduler.getJobGroupNames()==null || scheduler.getJobGroupNames().size()==0) {
// return null;
// }
// String groupName = scheduler.getJobGroupNames().get(0);
// Set<JobKey> jobKeys = scheduler.getJobKeys(GroupMatcher.jobGroupEquals(groupName));
// if (jobKeys!=null && jobKeys.size()>0) {
// for (JobKey jobKey : jobKeys) {
// TriggerKey triggerKey = TriggerKey.triggerKey(jobKey.getName(), Scheduler.DEFAULT_GROUP);
// Trigger trigger = scheduler.getTrigger(triggerKey);
// JobDetail jobDetail = scheduler.getJobDetail(jobKey);
// TriggerState triggerState = scheduler.getTriggerState(triggerKey);
// Map<String, Object> jobMap = new HashMap<String, Object>();
// jobMap.put("TriggerKey", triggerKey);
// jobMap.put("Trigger", trigger);
// jobMap.put("JobDetail", jobDetail);
// jobMap.put("TriggerState", triggerState);
// jobList.add(jobMap);
// }
// }
//
// } catch (SchedulerException e) {
// logger.error(e.getMessage(), e);
// return null;
// }
// return jobList;
// }*/
//
//}

View File

@ -1,58 +0,0 @@
//package com.xxl.job.admin.core.quartz;
//
//import org.quartz.SchedulerConfigException;
//import org.quartz.spi.ThreadPool;
//
///**
// * single thread pool, for async trigger
// *
// * @author xuxueli 2019-03-06
// */
//public class XxlJobThreadPool implements ThreadPool {
//
// @Override
// public boolean runInThread(Runnable runnable) {
//
// // async run
// runnable.run();
// return true;
//
// //return false;
// }
//
// @Override
// public int blockForAvailableThreads() {
// return 1;
// }
//
// @Override
// public void initialize() throws SchedulerConfigException {
//
// }
//
// @Override
// public void shutdown(boolean waitForJobsToComplete) {
//
// }
//
// @Override
// public int getPoolSize() {
// return 1;
// }
//
// @Override
// public void setInstanceId(String schedInstId) {
//
// }
//
// @Override
// public void setInstanceName(String schedName) {
//
// }
//
// // support
// public void setThreadCount(int count) {
// //
// }
//
//}

View File

@ -1,48 +0,0 @@
package com.xxl.job.admin.core.route;
import com.xxl.job.admin.core.route.strategy.*;
import com.xxl.job.admin.core.util.I18nUtil;
/**
* Created by xuxueli on 17/3/10.
*/
public enum ExecutorRouteStrategyEnum {
FIRST(I18nUtil.getString("jobconf_route_first"), new ExecutorRouteFirst()),
LAST(I18nUtil.getString("jobconf_route_last"), new ExecutorRouteLast()),
ROUND(I18nUtil.getString("jobconf_route_round"), new ExecutorRouteRound()),
RANDOM(I18nUtil.getString("jobconf_route_random"), new ExecutorRouteRandom()),
CONSISTENT_HASH(I18nUtil.getString("jobconf_route_consistenthash"), new ExecutorRouteConsistentHash()),
LEAST_FREQUENTLY_USED(I18nUtil.getString("jobconf_route_lfu"), new ExecutorRouteLFU()),
LEAST_RECENTLY_USED(I18nUtil.getString("jobconf_route_lru"), new ExecutorRouteLRU()),
FAILOVER(I18nUtil.getString("jobconf_route_failover"), new ExecutorRouteFailover()),
BUSYOVER(I18nUtil.getString("jobconf_route_busyover"), new ExecutorRouteBusyover()),
SHARDING_BROADCAST(I18nUtil.getString("jobconf_route_shard"), null);
ExecutorRouteStrategyEnum(String title, ExecutorRouter router) {
this.title = title;
this.router = router;
}
private String title;
private ExecutorRouter router;
public String getTitle() {
return title;
}
public ExecutorRouter getRouter() {
return router;
}
public static ExecutorRouteStrategyEnum match(String name, ExecutorRouteStrategyEnum defaultItem){
if (name != null) {
for (ExecutorRouteStrategyEnum item: ExecutorRouteStrategyEnum.values()) {
if (item.name().equals(name)) {
return item;
}
}
}
return defaultItem;
}
}

View File

@ -1,24 +0,0 @@
package com.xxl.job.admin.core.route;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public abstract class ExecutorRouter {
protected static Logger logger = LoggerFactory.getLogger(ExecutorRouter.class);
/**
* route address
*
* @param addressList
* @return ReturnT.content=address
*/
public abstract ReturnT<String> route(TriggerParam triggerParam, List<String> addressList);
}

View File

@ -1,48 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.scheduler.XxlJobScheduler;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.core.biz.ExecutorBiz;
import com.xxl.job.core.biz.model.IdleBeatParam;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteBusyover extends ExecutorRouter {
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
StringBuffer idleBeatResultSB = new StringBuffer();
for (String address : addressList) {
// beat
ReturnT<String> idleBeatResult = null;
try {
ExecutorBiz executorBiz = XxlJobScheduler.getExecutorBiz(address);
idleBeatResult = executorBiz.idleBeat(new IdleBeatParam(triggerParam.getJobId()));
} catch (Exception e) {
logger.error(e.getMessage(), e);
idleBeatResult = new ReturnT<String>(ReturnT.FAIL_CODE, ""+e );
}
idleBeatResultSB.append( (idleBeatResultSB.length()>0)?"<br><br>":"")
.append(I18nUtil.getString("jobconf_idleBeat") + "")
.append("<br>address").append(address)
.append("<br>code").append(idleBeatResult.getCode())
.append("<br>msg").append(idleBeatResult.getMsg());
// beat success
if (idleBeatResult.getCode() == ReturnT.SUCCESS_CODE) {
idleBeatResult.setMsg(idleBeatResultSB.toString());
idleBeatResult.setContent(address);
return idleBeatResult;
}
}
return new ReturnT<String>(ReturnT.FAIL_CODE, idleBeatResultSB.toString());
}
}

View File

@ -1,85 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* Consistent hashing: job IDs and executor addresses are hashed onto the same ring, so the jobs of a
* group are spread evenly across its executors while each job always routes to the same machine.
*      a. virtual nodes: smooth out an uneven distribution when there are only a few executors;
*      b. custom hash method instead of hashCode: String.hashCode has a narrow, collision-prone range,
*         so an MD5-based hash is used instead (see the usage sketch after this class).
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteConsistentHash extends ExecutorRouter {
private static int VIRTUAL_NODE_NUM = 100;
/**
* get hash code on 2^32 ring (md5hash)
* @param key
* @return
*/
private static long hash(String key) {
// md5 byte
MessageDigest md5;
try {
md5 = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException("MD5 not supported", e);
}
md5.reset();
byte[] keyBytes = null;
try {
keyBytes = key.getBytes("UTF-8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("Unknown string :" + key, e);
}
md5.update(keyBytes);
byte[] digest = md5.digest();
// hash code, Truncate to 32-bits
long hashCode = ((long) (digest[3] & 0xFF) << 24)
| ((long) (digest[2] & 0xFF) << 16)
| ((long) (digest[1] & 0xFF) << 8)
| (digest[0] & 0xFF);
long truncateHashCode = hashCode & 0xffffffffL;
return truncateHashCode;
}
public String hashJob(int jobId, List<String> addressList) {
// ------A1------A2-------A3------
// -----------J1------------------
TreeMap<Long, String> addressRing = new TreeMap<Long, String>();
for (String address: addressList) {
for (int i = 0; i < VIRTUAL_NODE_NUM; i++) {
long addressHash = hash("SHARD-" + address + "-NODE-" + i);
addressRing.put(addressHash, address);
}
}
long jobHash = hash(String.valueOf(jobId));
SortedMap<Long, String> lastRing = addressRing.tailMap(jobHash);
if (!lastRing.isEmpty()) {
return lastRing.get(lastRing.firstKey());
}
return addressRing.firstEntry().getValue();
}
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
String address = hashJob(triggerParam.getJobId(), addressList);
return new ReturnT<String>(address);
}
}
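
A minimal sketch of how the ring above behaves, assuming it is compiled in the same package as the deleted strategy (addresses and job id are illustrative): the same jobId keeps resolving to the same executor, and removing one address only remaps the jobs that hashed onto it.

import java.util.Arrays;
import java.util.List;

public class ConsistentHashRouteSketch {
    public static void main(String[] args) {
        ExecutorRouteConsistentHash router = new ExecutorRouteConsistentHash();
        List<String> all = Arrays.asList("192.168.0.1:9999", "192.168.0.2:9999", "192.168.0.3:9999");

        String first = router.hashJob(42, all);
        String again = router.hashJob(42, all);
        System.out.println(first.equals(again));      // true - stable mapping for job 42

        // drop one executor: only jobs that lived on it move, the rest keep their address
        List<String> reduced = Arrays.asList("192.168.0.1:9999", "192.168.0.2:9999");
        System.out.println(router.hashJob(42, reduced));
    }
}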

View File

@ -1,48 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.scheduler.XxlJobScheduler;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.core.biz.ExecutorBiz;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteFailover extends ExecutorRouter {
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
StringBuffer beatResultSB = new StringBuffer();
for (String address : addressList) {
// beat
ReturnT<String> beatResult = null;
try {
ExecutorBiz executorBiz = XxlJobScheduler.getExecutorBiz(address);
beatResult = executorBiz.beat();
} catch (Exception e) {
logger.error(e.getMessage(), e);
beatResult = new ReturnT<String>(ReturnT.FAIL_CODE, ""+e );
}
beatResultSB.append( (beatResultSB.length()>0)?"<br><br>":"")
.append(I18nUtil.getString("jobconf_beat") + "")
.append("<br>address").append(address)
.append("<br>code").append(beatResult.getCode())
.append("<br>msg").append(beatResult.getMsg());
// beat success
if (beatResult.getCode() == ReturnT.SUCCESS_CODE) {
beatResult.setMsg(beatResultSB.toString());
beatResult.setContent(address);
return beatResult;
}
}
return new ReturnT<String>(ReturnT.FAIL_CODE, beatResultSB.toString());
}
}

View File

@ -1,19 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteFirst extends ExecutorRouter {
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList){
return new ReturnT<String>(addressList.get(0));
}
}

View File

@ -1,82 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* For every executor of a single job, the least frequently used address is elected first.
*      a. (*) LFU (Least Frequently Used): the address with the lowest use count wins;
*      b. LRU (Least Recently Used): the address unused for the longest time wins.
*
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteLFU extends ExecutorRouter {
private static ConcurrentMap<Integer, HashMap<String, Integer>> jobLfuMap = new ConcurrentHashMap<Integer, HashMap<String, Integer>>();
private static long CACHE_VALID_TIME = 0;
public String route(int jobId, List<String> addressList) {
// cache clear
if (System.currentTimeMillis() > CACHE_VALID_TIME) {
jobLfuMap.clear();
CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
}
// lfu item init
// sorting by key could use a TreeMap with a Comparator passed to the constructor; sorting by value currently has to go through an ArrayList
HashMap<String, Integer> lfuItemMap = jobLfuMap.get(jobId);
if (lfuItemMap == null) {
lfuItemMap = new HashMap<>();
// putIfAbsent: avoid overwriting an entry created concurrently
jobLfuMap.putIfAbsent(jobId, lfuItemMap);
}
// put new
for (String address: addressList) {
if (!lfuItemMap.containsKey(address) || lfuItemMap.get(address) >1000000 ) {
// seed with a random value on first use to spread out the initial load
lfuItemMap.put(address, new Random().nextInt(addressList.size()));
}
}
// remove old
List<String> delKeys = new ArrayList<>();
for (String existKey: lfuItemMap.keySet()) {
if (!addressList.contains(existKey)) {
delKeys.add(existKey);
}
}
if (delKeys.size() > 0) {
for (String delKey: delKeys) {
lfuItemMap.remove(delKey);
}
}
// load the address with the least used count
List<Map.Entry<String, Integer>> lfuItemList = new ArrayList<Map.Entry<String, Integer>>(lfuItemMap.entrySet());
Collections.sort(lfuItemList, new Comparator<Map.Entry<String, Integer>>() {
@Override
public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
return o1.getValue().compareTo(o2.getValue());
}
});
Map.Entry<String, Integer> addressItem = lfuItemList.get(0);
String minAddress = addressItem.getKey();
addressItem.setValue(addressItem.getValue() + 1);
return addressItem.getKey();
}
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
String address = route(triggerParam.getJobId(), addressList);
return new ReturnT<String>(address);
}
}

View File

@ -1,76 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* For every executor of a single job, the least recently used address is elected first.
*      a. LFU (Least Frequently Used): the address with the lowest use count wins;
*      b. (*) LRU (Least Recently Used): the address unused for the longest time wins.
*
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteLRU extends ExecutorRouter {
private static ConcurrentMap<Integer, LinkedHashMap<String, String>> jobLRUMap = new ConcurrentHashMap<Integer, LinkedHashMap<String, String>>();
private static long CACHE_VALID_TIME = 0;
public String route(int jobId, List<String> addressList) {
// cache clear
if (System.currentTimeMillis() > CACHE_VALID_TIME) {
jobLRUMap.clear();
CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
}
// init lru
LinkedHashMap<String, String> lruItem = jobLRUMap.get(jobId);
if (lruItem == null) {
/**
* LinkedHashMap
*      a. accessOrder: true = iteration follows access order (re-ordered on get/put), false = insertion order;
*      b. removeEldestEntry: invoked on insert; returning true evicts the eldest entry, so a subclass with a
*         max-capacity check turns LinkedHashMap into a fixed-size LRU cache (see the standalone sketch after this class).
*/
lruItem = new LinkedHashMap<String, String>(16, 0.75f, true);
jobLRUMap.putIfAbsent(jobId, lruItem);
}
// put new
for (String address: addressList) {
if (!lruItem.containsKey(address)) {
lruItem.put(address, address);
}
}
// remove old
List<String> delKeys = new ArrayList<>();
for (String existKey: lruItem.keySet()) {
if (!addressList.contains(existKey)) {
delKeys.add(existKey);
}
}
if (delKeys.size() > 0) {
for (String delKey: delKeys) {
lruItem.remove(delKey);
}
}
// load
String eldestKey = lruItem.entrySet().iterator().next().getKey();
String eldestValue = lruItem.get(eldestKey);
return eldestValue;
}
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
String address = route(triggerParam.getJobId(), addressList);
return new ReturnT<String>(address);
}
}
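
The strategy above leans entirely on LinkedHashMap's access order: with accessOrder=true the eldest entry in iteration order is the least recently touched address, which is what route() returns. A minimal, JDK-only sketch of that property (addresses are illustrative):

import java.util.LinkedHashMap;

public class AccessOrderLruSketch {
    public static void main(String[] args) {
        // accessOrder = true: iteration runs from least recently used to most recently used
        LinkedHashMap<String, String> lru = new LinkedHashMap<>(16, 0.75f, true);
        lru.put("addr-A", "addr-A");
        lru.put("addr-B", "addr-B");
        lru.put("addr-C", "addr-C");

        lru.get("addr-A");   // touching A and B moves them towards the tail
        lru.get("addr-B");

        // the head of the iteration order is now the least recently used entry
        System.out.println(lru.entrySet().iterator().next().getKey());   // addr-C
    }
}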

View File

@ -1,19 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteLast extends ExecutorRouter {
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
return new ReturnT<String>(addressList.get(addressList.size()-1));
}
}

View File

@ -1,23 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.util.List;
import java.util.Random;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteRandom extends ExecutorRouter {
private static Random localRandom = new Random();
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
String address = addressList.get(localRandom.nextInt(addressList.size()));
return new ReturnT<String>(address);
}
}

View File

@ -1,46 +0,0 @@
package com.xxl.job.admin.core.route.strategy;
import com.xxl.job.admin.core.route.ExecutorRouter;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteRound extends ExecutorRouter {
private static ConcurrentMap<Integer, AtomicInteger> routeCountEachJob = new ConcurrentHashMap<>();
private static long CACHE_VALID_TIME = 0;
private static int count(int jobId) {
// cache clear
if (System.currentTimeMillis() > CACHE_VALID_TIME) {
routeCountEachJob.clear();
CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
}
AtomicInteger count = routeCountEachJob.get(jobId);
if (count == null || count.get() > 1000000) {
// seed with a random value on first use to spread out the initial load
count = new AtomicInteger(new Random().nextInt(100));
} else {
// count++
count.addAndGet(1);
}
routeCountEachJob.put(jobId, count);
return count.get();
}
@Override
public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
String address = addressList.get(count(triggerParam.getJobId())%addressList.size());
return new ReturnT<String>(address);
}
}

View File

@ -1,39 +0,0 @@
package com.xxl.job.admin.core.scheduler;
import com.xxl.job.admin.core.util.I18nUtil;
/**
* @author xuxueli 2020-10-29 21:11:23
*/
public enum MisfireStrategyEnum {
/**
* do nothing
*/
DO_NOTHING(I18nUtil.getString("misfire_strategy_do_nothing")),
/**
* fire once now
*/
FIRE_ONCE_NOW(I18nUtil.getString("misfire_strategy_fire_once_now"));
private String title;
MisfireStrategyEnum(String title) {
this.title = title;
}
public String getTitle() {
return title;
}
public static MisfireStrategyEnum match(String name, MisfireStrategyEnum defaultItem){
for (MisfireStrategyEnum item: MisfireStrategyEnum.values()) {
if (item.name().equals(name)) {
return item;
}
}
return defaultItem;
}
}

View File

@ -1,46 +0,0 @@
package com.xxl.job.admin.core.scheduler;
import com.xxl.job.admin.core.util.I18nUtil;
/**
* @author xuxueli 2020-10-29 21:11:23
*/
public enum ScheduleTypeEnum {
NONE(I18nUtil.getString("schedule_type_none")),
/**
* schedule by cron
*/
CRON(I18nUtil.getString("schedule_type_cron")),
/**
* schedule by fixed rate (in seconds)
*/
FIX_RATE(I18nUtil.getString("schedule_type_fix_rate")),
/**
* schedule by fix delay (in seconds) after the last time
*/
/*FIX_DELAY(I18nUtil.getString("schedule_type_fix_delay"))*/;
private String title;
ScheduleTypeEnum(String title) {
this.title = title;
}
public String getTitle() {
return title;
}
public static ScheduleTypeEnum match(String name, ScheduleTypeEnum defaultItem){
for (ScheduleTypeEnum item: ScheduleTypeEnum.values()) {
if (item.name().equals(name)) {
return item;
}
}
return defaultItem;
}
}

View File

@ -1,101 +0,0 @@
package com.xxl.job.admin.core.scheduler;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.thread.*;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.core.biz.ExecutorBiz;
import com.xxl.job.core.biz.client.ExecutorBizClient;
import com.xxl.job.core.enums.ExecutorBlockStrategyEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* @author xuxueli 2018-10-28 00:18:17
*/
public class XxlJobScheduler {
private static final Logger logger = LoggerFactory.getLogger(XxlJobScheduler.class);
public void init() throws Exception {
// init i18n
initI18n();
// admin trigger pool start
JobTriggerPoolHelper.toStart();
// admin registry monitor run
JobRegistryHelper.getInstance().start();
// admin fail-monitor run
JobFailMonitorHelper.getInstance().start();
// admin lose-monitor run ( depend on JobTriggerPoolHelper )
JobCompleteHelper.getInstance().start();
// admin log report start
JobLogReportHelper.getInstance().start();
// start-schedule ( depend on JobTriggerPoolHelper )
JobScheduleHelper.getInstance().start();
logger.info(">>>>>>>>> init xxl-job admin success.");
}
public void destroy() throws Exception {
// stop-schedule
JobScheduleHelper.getInstance().toStop();
// admin log report stop
JobLogReportHelper.getInstance().toStop();
// admin lose-monitor stop
JobCompleteHelper.getInstance().toStop();
// admin fail-monitor stop
JobFailMonitorHelper.getInstance().toStop();
// admin registry stop
JobRegistryHelper.getInstance().toStop();
// admin trigger pool stop
JobTriggerPoolHelper.toStop();
}
// ---------------------- I18n ----------------------
private void initI18n(){
for (ExecutorBlockStrategyEnum item:ExecutorBlockStrategyEnum.values()) {
item.setTitle(I18nUtil.getString("jobconf_block_".concat(item.name())));
}
}
// ---------------------- executor-client ----------------------
private static ConcurrentMap<String, ExecutorBiz> executorBizRepository = new ConcurrentHashMap<String, ExecutorBiz>();
public static ExecutorBiz getExecutorBiz(String address) throws Exception {
// valid
if (address==null || address.trim().length()==0) {
return null;
}
// load-cache
address = address.trim();
ExecutorBiz executorBiz = executorBizRepository.get(address);
if (executorBiz != null) {
return executorBiz;
}
// set-cache
executorBiz = new ExecutorBizClient(address, XxlJobAdminConfig.getAdminConfig().getAccessToken());
executorBizRepository.put(address, executorBiz);
return executorBiz;
}
}

View File

@ -1,185 +0,0 @@
package com.xxl.job.admin.core.thread;
import com.xxl.job.admin.core.complete.XxlJobCompleter;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobLog;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.core.biz.model.HandleCallbackParam;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.util.DateUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.List;
import java.util.concurrent.*;
/**
* job lose-monitor instance
*
* @author xuxueli 2015-9-1 18:05:56
*/
public class JobCompleteHelper {
private static Logger logger = LoggerFactory.getLogger(JobCompleteHelper.class);
private static JobCompleteHelper instance = new JobCompleteHelper();
public static JobCompleteHelper getInstance(){
return instance;
}
// ---------------------- monitor ----------------------
private ThreadPoolExecutor callbackThreadPool = null;
private Thread monitorThread;
private volatile boolean toStop = false;
public void start(){
// for callback
callbackThreadPool = new ThreadPoolExecutor(
2,
20,
30L,
TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>(3000),
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "xxl-job, admin JobLosedMonitorHelper-callbackThreadPool-" + r.hashCode());
}
},
new RejectedExecutionHandler() {
@Override
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
r.run();
logger.warn(">>>>>>>>>>> xxl-job, callback too fast, match threadpool rejected handler(run now).");
}
});
// for monitor
monitorThread = new Thread(new Runnable() {
@Override
public void run() {
// wait for JobTriggerPoolHelper-init
try {
TimeUnit.MILLISECONDS.sleep(50);
} catch (InterruptedException e) {
if (!toStop) {
logger.error(e.getMessage(), e);
}
}
// monitor
while (!toStop) {
try {
// lost-result handling: if a trigger log has stayed in "running" state for more than 10 minutes and the corresponding executor is offline (heartbeat registration failed), mark the local schedule record as failed
Date losedTime = DateUtil.addMinutes(new Date(), -10);
List<Long> losedJobIds = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().findLostJobIds(losedTime);
if (losedJobIds!=null && losedJobIds.size()>0) {
for (Long logId: losedJobIds) {
XxlJobLog jobLog = new XxlJobLog();
jobLog.setId(logId);
jobLog.setHandleTime(new Date());
jobLog.setHandleCode(ReturnT.FAIL_CODE);
jobLog.setHandleMsg( I18nUtil.getString("joblog_lost_fail") );
XxlJobCompleter.updateHandleInfoAndFinish(jobLog);
}
}
} catch (Exception e) {
if (!toStop) {
logger.error(">>>>>>>>>>> xxl-job, job fail monitor thread error:{}", e);
}
}
try {
TimeUnit.SECONDS.sleep(60);
} catch (Exception e) {
if (!toStop) {
logger.error(e.getMessage(), e);
}
}
}
logger.info(">>>>>>>>>>> xxl-job, JobLosedMonitorHelper stop");
}
});
monitorThread.setDaemon(true);
monitorThread.setName("xxl-job, admin JobLosedMonitorHelper");
monitorThread.start();
}
public void toStop(){
toStop = true;
// stop registryOrRemoveThreadPool
callbackThreadPool.shutdownNow();
// stop monitorThread (interrupt and wait)
monitorThread.interrupt();
try {
monitorThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
// ---------------------- helper ----------------------
public ReturnT<String> callback(List<HandleCallbackParam> callbackParamList) {
callbackThreadPool.execute(new Runnable() {
@Override
public void run() {
for (HandleCallbackParam handleCallbackParam: callbackParamList) {
ReturnT<String> callbackResult = callback(handleCallbackParam);
logger.debug(">>>>>>>>> JobApiController.callback {}, handleCallbackParam={}, callbackResult={}",
(callbackResult.getCode()== ReturnT.SUCCESS_CODE?"success":"fail"), handleCallbackParam, callbackResult);
}
}
});
return ReturnT.SUCCESS;
}
private ReturnT<String> callback(HandleCallbackParam handleCallbackParam) {
// valid log item
XxlJobLog log = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().load(handleCallbackParam.getLogId());
if (log == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "log item not found.");
}
if (log.getHandleCode() > 0) {
// avoid repeat callback, trigger child job etc
return new ReturnT<String>(ReturnT.FAIL_CODE, "log repeate callback.");
}
// handle msg
StringBuffer handleMsg = new StringBuffer();
if (log.getHandleMsg()!=null) {
handleMsg.append(log.getHandleMsg()).append("<br>");
}
if (handleCallbackParam.getHandleMsg() != null) {
handleMsg.append(handleCallbackParam.getHandleMsg());
}
// success, save log
log.setHandleTime(new Date());
log.setHandleCode(handleCallbackParam.getHandleCode());
log.setHandleMsg(handleMsg.toString());
XxlJobCompleter.updateHandleInfoAndFinish(log);
return ReturnT.SUCCESS;
}
}

View File

@ -1,110 +0,0 @@
package com.xxl.job.admin.core.thread;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog;
import com.xxl.job.admin.core.trigger.TriggerTypeEnum;
import com.xxl.job.admin.core.util.I18nUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* job monitor instance
*
* @author xuxueli 2015-9-1 18:05:56
*/
public class JobFailMonitorHelper {
private static Logger logger = LoggerFactory.getLogger(JobFailMonitorHelper.class);
private static JobFailMonitorHelper instance = new JobFailMonitorHelper();
public static JobFailMonitorHelper getInstance(){
return instance;
}
// ---------------------- monitor ----------------------
private Thread monitorThread;
private volatile boolean toStop = false;
public void start(){
monitorThread = new Thread(new Runnable() {
@Override
public void run() {
// monitor
while (!toStop) {
try {
List<Long> failLogIds = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().findFailJobLogIds(1000);
if (failLogIds!=null && !failLogIds.isEmpty()) {
for (long failLogId: failLogIds) {
// lock log
int lockRet = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().updateAlarmStatus(failLogId, 0, -1);
if (lockRet < 1) {
continue;
}
XxlJobLog log = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().load(failLogId);
XxlJobInfo info = XxlJobAdminConfig.getAdminConfig().getXxlJobInfoDao().loadById(log.getJobId());
// 1、fail retry monitor
if (log.getExecutorFailRetryCount() > 0) {
JobTriggerPoolHelper.trigger(log.getJobId(), TriggerTypeEnum.RETRY, (log.getExecutorFailRetryCount()-1), log.getExecutorShardingParam(), log.getExecutorParam(), null);
String retryMsg = "<br><br><span style=\"color:#F39C12;\" > >>>>>>>>>>>"+ I18nUtil.getString("jobconf_trigger_type_retry") +"<<<<<<<<<<< </span><br>";
log.setTriggerMsg(log.getTriggerMsg() + retryMsg);
XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().updateTriggerInfo(log);
}
// 2、fail alarm monitor
int newAlarmStatus = 0; // alarm status: 0 = default, -1 = locked, 1 = no alarm needed, 2 = alarm succeeded, 3 = alarm failed
if (info != null) {
boolean alarmResult = XxlJobAdminConfig.getAdminConfig().getJobAlarmer().alarm(info, log);
newAlarmStatus = alarmResult?2:3;
} else {
newAlarmStatus = 1;
}
XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().updateAlarmStatus(failLogId, -1, newAlarmStatus);
}
}
} catch (Exception e) {
if (!toStop) {
logger.error(">>>>>>>>>>> xxl-job, job fail monitor thread error:{}", e);
}
}
try {
TimeUnit.SECONDS.sleep(10);
} catch (Exception e) {
if (!toStop) {
logger.error(e.getMessage(), e);
}
}
}
logger.info(">>>>>>>>>>> xxl-job, job fail monitor thread stop");
}
});
monitorThread.setDaemon(true);
monitorThread.setName("xxl-job, admin JobFailMonitorHelper");
monitorThread.start();
}
public void toStop(){
toStop = true;
// interrupt and wait
monitorThread.interrupt();
try {
monitorThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
}

View File

@ -1,152 +0,0 @@
package com.xxl.job.admin.core.thread;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobLogReport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* job log report helper
*
* @author xuxueli 2019-11-22
*/
public class JobLogReportHelper {
private static Logger logger = LoggerFactory.getLogger(JobLogReportHelper.class);
private static JobLogReportHelper instance = new JobLogReportHelper();
public static JobLogReportHelper getInstance(){
return instance;
}
private Thread logrThread;
private volatile boolean toStop = false;
public void start(){
logrThread = new Thread(new Runnable() {
@Override
public void run() {
// last clean log time
long lastCleanLogTime = 0;
while (!toStop) {
// 1、log-report refresh: refresh log report in 3 days
try {
for (int i = 0; i < 3; i++) {
// today
Calendar itemDay = Calendar.getInstance();
itemDay.add(Calendar.DAY_OF_MONTH, -i);
itemDay.set(Calendar.HOUR_OF_DAY, 0);
itemDay.set(Calendar.MINUTE, 0);
itemDay.set(Calendar.SECOND, 0);
itemDay.set(Calendar.MILLISECOND, 0);
Date todayFrom = itemDay.getTime();
itemDay.set(Calendar.HOUR_OF_DAY, 23);
itemDay.set(Calendar.MINUTE, 59);
itemDay.set(Calendar.SECOND, 59);
itemDay.set(Calendar.MILLISECOND, 999);
Date todayTo = itemDay.getTime();
// refresh log-report every minute
XxlJobLogReport xxlJobLogReport = new XxlJobLogReport();
xxlJobLogReport.setTriggerDay(todayFrom);
xxlJobLogReport.setRunningCount(0);
xxlJobLogReport.setSucCount(0);
xxlJobLogReport.setFailCount(0);
Map<String, Object> triggerCountMap = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().findLogReport(todayFrom, todayTo);
if (triggerCountMap!=null && triggerCountMap.size()>0) {
int triggerDayCount = triggerCountMap.containsKey("triggerDayCount")?Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCount"))):0;
int triggerDayCountRunning = triggerCountMap.containsKey("triggerDayCountRunning")?Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountRunning"))):0;
int triggerDayCountSuc = triggerCountMap.containsKey("triggerDayCountSuc")?Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountSuc"))):0;
int triggerDayCountFail = triggerDayCount - triggerDayCountRunning - triggerDayCountSuc;
xxlJobLogReport.setRunningCount(triggerDayCountRunning);
xxlJobLogReport.setSucCount(triggerDayCountSuc);
xxlJobLogReport.setFailCount(triggerDayCountFail);
}
// do refresh
int ret = XxlJobAdminConfig.getAdminConfig().getXxlJobLogReportDao().update(xxlJobLogReport);
if (ret < 1) {
XxlJobAdminConfig.getAdminConfig().getXxlJobLogReportDao().save(xxlJobLogReport);
}
}
} catch (Exception e) {
if (!toStop) {
logger.error(">>>>>>>>>>> xxl-job, job log report thread error:{}", e);
}
}
// 2、log-clean: switch open & once each day
if (XxlJobAdminConfig.getAdminConfig().getLogretentiondays()>0
&& System.currentTimeMillis() - lastCleanLogTime > 24*60*60*1000) {
// expire-time
Calendar expiredDay = Calendar.getInstance();
expiredDay.add(Calendar.DAY_OF_MONTH, -1 * XxlJobAdminConfig.getAdminConfig().getLogretentiondays());
expiredDay.set(Calendar.HOUR_OF_DAY, 0);
expiredDay.set(Calendar.MINUTE, 0);
expiredDay.set(Calendar.SECOND, 0);
expiredDay.set(Calendar.MILLISECOND, 0);
Date clearBeforeTime = expiredDay.getTime();
// clean expired log
List<Long> logIds = null;
do {
logIds = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().findClearLogIds(0, 0, clearBeforeTime, 0, 1000);
if (logIds!=null && logIds.size()>0) {
XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().clearLog(logIds);
}
} while (logIds!=null && logIds.size()>0);
// update clean time
lastCleanLogTime = System.currentTimeMillis();
}
try {
TimeUnit.MINUTES.sleep(1);
} catch (Exception e) {
if (!toStop) {
logger.error(e.getMessage(), e);
}
}
}
logger.info(">>>>>>>>>>> xxl-job, job log report thread stop");
}
});
logrThread.setDaemon(true);
logrThread.setName("xxl-job, admin JobLogReportHelper");
logrThread.start();
}
public void toStop(){
toStop = true;
// interrupt and wait
logrThread.interrupt();
try {
logrThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
}
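The day-window arithmetic used by the removed report refresher, as a small standalone sketch (the 3-day look-back is the only behaviour carried over; class and method names are illustrative):

import java.util.Calendar;
import java.util.Date;

// Hypothetical sketch: compute the [start, end] window of a day, i days before today.
public class DayWindowSketch {
    static Date[] dayWindow(int daysAgo) {
        Calendar day = Calendar.getInstance();
        day.add(Calendar.DAY_OF_MONTH, -daysAgo);
        day.set(Calendar.HOUR_OF_DAY, 0);
        day.set(Calendar.MINUTE, 0);
        day.set(Calendar.SECOND, 0);
        day.set(Calendar.MILLISECOND, 0);
        Date from = day.getTime();                       // 00:00:00.000
        day.set(Calendar.HOUR_OF_DAY, 23);
        day.set(Calendar.MINUTE, 59);
        day.set(Calendar.SECOND, 59);
        day.set(Calendar.MILLISECOND, 999);
        Date to = day.getTime();                         // 23:59:59.999
        return new Date[]{from, to};
    }

    public static void main(String[] args) {
        for (int i = 0; i < 3; i++) {                    // refresh the last 3 days, as in the helper above
            Date[] window = dayWindow(i);
            System.out.println(window[0] + " .. " + window[1]);
        }
    }
}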

View File

@ -1,204 +0,0 @@
package com.xxl.job.admin.core.thread;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobRegistry;
import com.xxl.job.core.biz.model.RegistryParam;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.enums.RegistryConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import java.util.*;
import java.util.concurrent.*;
/**
* job registry instance
* @author xuxueli 2016-10-02 19:10:24
*/
public class JobRegistryHelper {
private static Logger logger = LoggerFactory.getLogger(JobRegistryHelper.class);
private static JobRegistryHelper instance = new JobRegistryHelper();
public static JobRegistryHelper getInstance(){
return instance;
}
private ThreadPoolExecutor registryOrRemoveThreadPool = null;
private Thread registryMonitorThread;
private volatile boolean toStop = false;
public void start(){
// for registry or remove
registryOrRemoveThreadPool = new ThreadPoolExecutor(
2,
10,
30L,
TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>(2000),
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "xxl-job, admin JobRegistryMonitorHelper-registryOrRemoveThreadPool-" + r.hashCode());
}
},
new RejectedExecutionHandler() {
@Override
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
r.run();
logger.warn(">>>>>>>>>>> xxl-job, registry or remove too fast, match threadpool rejected handler(run now).");
}
});
// for monitor
registryMonitorThread = new Thread(new Runnable() {
@Override
public void run() {
while (!toStop) {
try {
// auto registry group
List<XxlJobGroup> groupList = XxlJobAdminConfig.getAdminConfig().getXxlJobGroupDao().findByAddressType(0);
if (groupList!=null && !groupList.isEmpty()) {
// remove dead address (admin/executor)
List<Integer> ids = XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().findDead(RegistryConfig.DEAD_TIMEOUT, new Date());
if (ids!=null && ids.size()>0) {
XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().removeDead(ids);
}
// fresh online address (admin/executor)
HashMap<String, List<String>> appAddressMap = new HashMap<String, List<String>>();
List<XxlJobRegistry> list = XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
if (list != null) {
for (XxlJobRegistry item: list) {
if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
String appname = item.getRegistryKey();
List<String> registryList = appAddressMap.get(appname);
if (registryList == null) {
registryList = new ArrayList<String>();
}
if (!registryList.contains(item.getRegistryValue())) {
registryList.add(item.getRegistryValue());
}
appAddressMap.put(appname, registryList);
}
}
}
// fresh group address
for (XxlJobGroup group: groupList) {
List<String> registryList = appAddressMap.get(group.getAppname());
String addressListStr = null;
if (registryList!=null && !registryList.isEmpty()) {
Collections.sort(registryList);
StringBuilder addressListSB = new StringBuilder();
for (String item:registryList) {
addressListSB.append(item).append(",");
}
addressListStr = addressListSB.toString();
addressListStr = addressListStr.substring(0, addressListStr.length()-1);
}
group.setAddressList(addressListStr);
group.setUpdateTime(new Date());
XxlJobAdminConfig.getAdminConfig().getXxlJobGroupDao().update(group);
}
}
} catch (Exception e) {
if (!toStop) {
logger.error(">>>>>>>>>>> xxl-job, job registry monitor thread error:{}", e);
}
}
try {
TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT);
} catch (InterruptedException e) {
if (!toStop) {
logger.error(">>>>>>>>>>> xxl-job, job registry monitor thread error:{}", e);
}
}
}
logger.info(">>>>>>>>>>> xxl-job, job registry monitor thread stop");
}
});
registryMonitorThread.setDaemon(true);
registryMonitorThread.setName("xxl-job, admin JobRegistryMonitorHelper-registryMonitorThread");
registryMonitorThread.start();
}
public void toStop(){
toStop = true;
// stop registryOrRemoveThreadPool
registryOrRemoveThreadPool.shutdownNow();
// stop monitor (interrupt and wait)
registryMonitorThread.interrupt();
try {
registryMonitorThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
// ---------------------- helper ----------------------
public ReturnT<String> registry(RegistryParam registryParam) {
// valid
if (!StringUtils.hasText(registryParam.getRegistryGroup())
|| !StringUtils.hasText(registryParam.getRegistryKey())
|| !StringUtils.hasText(registryParam.getRegistryValue())) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "Illegal Argument.");
}
// async execute
registryOrRemoveThreadPool.execute(new Runnable() {
@Override
public void run() {
int ret = XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().registryUpdate(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), registryParam.getRegistryValue(), new Date());
if (ret < 1) {
XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().registrySave(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), registryParam.getRegistryValue(), new Date());
// fresh
freshGroupRegistryInfo(registryParam);
}
}
});
return ReturnT.SUCCESS;
}
public ReturnT<String> registryRemove(RegistryParam registryParam) {
// valid
if (!StringUtils.hasText(registryParam.getRegistryGroup())
|| !StringUtils.hasText(registryParam.getRegistryKey())
|| !StringUtils.hasText(registryParam.getRegistryValue())) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "Illegal Argument.");
}
// async execute
registryOrRemoveThreadPool.execute(new Runnable() {
@Override
public void run() {
int ret = XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().registryDelete(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), registryParam.getRegistryValue());
if (ret > 0) {
// fresh
freshGroupRegistryInfo(registryParam);
}
}
});
return ReturnT.SUCCESS;
}
private void freshGroupRegistryInfo(RegistryParam registryParam){
// Under consideration, prevent affecting core tables
}
}
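As a rough in-memory illustration of the address-list refresh done by the removed monitor thread (data below is hypothetical), grouping live executor registrations by appname and joining them into the comma-separated list stored on the group:

import java.util.*;

// Hypothetical sketch: group live executor addresses by appname and join them.
public class AddressListSketch {
    record Registry(String appname, String address) { }

    public static void main(String[] args) {
        List<Registry> live = List.of(
                new Registry("demo-executor", "http://10.0.0.2:9999"),
                new Registry("demo-executor", "http://10.0.0.1:9999"),
                new Registry("other-executor", "http://10.0.0.9:9999"));

        // appname -> sorted, de-duplicated set of addresses
        Map<String, SortedSet<String>> byApp = new HashMap<>();
        for (Registry item : live) {
            byApp.computeIfAbsent(item.appname(), a -> new TreeSet<>()).add(item.address());
        }

        // comma-joined address list, as stored on the job group
        String addressList = String.join(",", byApp.get("demo-executor"));
        System.out.println(addressList);   // http://10.0.0.1:9999,http://10.0.0.2:9999
    }
}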

View File

@ -1,383 +0,0 @@
package com.xxl.job.admin.core.thread;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.cron.CronExpression;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.scheduler.MisfireStrategyEnum;
import com.xxl.job.admin.core.scheduler.ScheduleTypeEnum;
import com.xxl.job.admin.core.trigger.TriggerTypeEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
/**
* @author xuxueli 2019-05-21
*/
public class JobScheduleHelper {
private static Logger logger = LoggerFactory.getLogger(JobScheduleHelper.class);
private static JobScheduleHelper instance = new JobScheduleHelper();
public static JobScheduleHelper getInstance(){
return instance;
}
// pre read
public static final long PRE_READ_MS = 5000;
private Thread scheduleThread;
private Thread ringThread;
private volatile boolean scheduleThreadToStop = false;
private volatile boolean ringThreadToStop = false;
private volatile static Map<Integer, List<Integer>> ringData = new ConcurrentHashMap<>();
public void start(){
// schedule thread
scheduleThread = new Thread(new Runnable() {
@Override
public void run() {
try {
TimeUnit.MILLISECONDS.sleep(5000 - System.currentTimeMillis()%1000 );
} catch (InterruptedException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
logger.info(">>>>>>>>> init xxl-job admin scheduler success.");
// pre-read count: threadpool-size * trigger-qps (each trigger costs about 50ms, qps = 1000/50 = 20)
int preReadCount = (XxlJobAdminConfig.getAdminConfig().getTriggerPoolFastMax() + XxlJobAdminConfig.getAdminConfig().getTriggerPoolSlowMax()) * 20;
while (!scheduleThreadToStop) {
// Scan Job
long start = System.currentTimeMillis();
Connection conn = null;
Boolean connAutoCommit = null;
PreparedStatement preparedStatement = null;
boolean preReadSuc = true;
try {
conn = XxlJobAdminConfig.getAdminConfig().getDataSource().getConnection();
connAutoCommit = conn.getAutoCommit();
conn.setAutoCommit(false);
preparedStatement = conn.prepareStatement( "select * from xxl_job_lock where lock_name = 'schedule_lock' for update" );
preparedStatement.execute();
// tx start
// 1、pre read
long nowTime = System.currentTimeMillis();
List<XxlJobInfo> scheduleList = XxlJobAdminConfig.getAdminConfig().getXxlJobInfoDao().scheduleJobQuery(nowTime + PRE_READ_MS, preReadCount);
if (scheduleList!=null && scheduleList.size()>0) {
// 2、push time-ring
for (XxlJobInfo jobInfo: scheduleList) {
// time-ring jump
if (nowTime > jobInfo.getTriggerNextTime() + PRE_READ_MS) {
// 2.1、trigger-expire > 5s：pass && make next-trigger-time
logger.warn(">>>>>>>>>>> xxl-job, schedule misfire, jobId = " + jobInfo.getId());
// 1、misfire match
MisfireStrategyEnum misfireStrategyEnum = MisfireStrategyEnum.match(jobInfo.getMisfireStrategy(), MisfireStrategyEnum.DO_NOTHING);
if (MisfireStrategyEnum.FIRE_ONCE_NOW == misfireStrategyEnum) {
// FIRE_ONCE_NOW 》 trigger
JobTriggerPoolHelper.trigger(jobInfo.getId(), TriggerTypeEnum.MISFIRE, -1, null, null, null);
logger.debug(">>>>>>>>>>> xxl-job, schedule push trigger : jobId = " + jobInfo.getId() );
}
// 2、fresh next
refreshNextValidTime(jobInfo, new Date());
} else if (nowTime > jobInfo.getTriggerNextTime()) {
// 2.2、trigger-expire < 5s：direct-trigger && make next-trigger-time
// 1、trigger
JobTriggerPoolHelper.trigger(jobInfo.getId(), TriggerTypeEnum.CRON, -1, null, null, null);
logger.debug(">>>>>>>>>>> xxl-job, schedule push trigger : jobId = " + jobInfo.getId() );
// 2、fresh next
refreshNextValidTime(jobInfo, new Date());
// next-trigger-time in 5s, pre-read again
if (jobInfo.getTriggerStatus()==1 && nowTime + PRE_READ_MS > jobInfo.getTriggerNextTime()) {
// 1、make ring second
int ringSecond = (int)((jobInfo.getTriggerNextTime()/1000)%60);
// 2、push time ring
pushTimeRing(ringSecond, jobInfo.getId());
// 3、fresh next
refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
}
} else {
// 2.3、trigger-pre-read：time-ring trigger && make next-trigger-time
// 1、make ring second
int ringSecond = (int)((jobInfo.getTriggerNextTime()/1000)%60);
// 2、push time ring
pushTimeRing(ringSecond, jobInfo.getId());
// 3、fresh next
refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
}
}
// 3、update trigger info
for (XxlJobInfo jobInfo: scheduleList) {
XxlJobAdminConfig.getAdminConfig().getXxlJobInfoDao().scheduleUpdate(jobInfo);
}
} else {
preReadSuc = false;
}
// tx stop
} catch (Exception e) {
if (!scheduleThreadToStop) {
logger.error(">>>>>>>>>>> xxl-job, JobScheduleHelper#scheduleThread error:{}", e);
}
} finally {
// commit
if (conn != null) {
try {
conn.commit();
} catch (SQLException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
try {
conn.setAutoCommit(connAutoCommit);
} catch (SQLException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
try {
conn.close();
} catch (SQLException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
}
// close PreparedStatement
if (null != preparedStatement) {
try {
preparedStatement.close();
} catch (SQLException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
}
}
long cost = System.currentTimeMillis()-start;
// Wait seconds, align second
if (cost < 1000) { // scan-overtime, not wait
try {
// pre-read period: success > scan each second; fail > skip this period;
TimeUnit.MILLISECONDS.sleep((preReadSuc?1000:PRE_READ_MS) - System.currentTimeMillis()%1000);
} catch (InterruptedException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
}
}
logger.info(">>>>>>>>>>> xxl-job, JobScheduleHelper#scheduleThread stop");
}
});
scheduleThread.setDaemon(true);
scheduleThread.setName("xxl-job, admin JobScheduleHelper#scheduleThread");
scheduleThread.start();
// ring thread
ringThread = new Thread(new Runnable() {
@Override
public void run() {
while (!ringThreadToStop) {
// align second
try {
TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
} catch (InterruptedException e) {
if (!ringThreadToStop) {
logger.error(e.getMessage(), e);
}
}
try {
// second data
List<Integer> ringItemData = new ArrayList<>();
int nowSecond = Calendar.getInstance().get(Calendar.SECOND); // also check the previous tick, in case processing ran long and skipped past it
for (int i = 0; i < 2; i++) {
List<Integer> tmpData = ringData.remove( (nowSecond+60-i)%60 );
if (tmpData != null) {
ringItemData.addAll(tmpData);
}
}
// ring trigger
logger.debug(">>>>>>>>>>> xxl-job, time-ring beat : " + nowSecond + " = " + Arrays.asList(ringItemData) );
if (ringItemData.size() > 0) {
// do trigger
for (int jobId: ringItemData) {
// do trigger
JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null, null);
}
// clear
ringItemData.clear();
}
} catch (Exception e) {
if (!ringThreadToStop) {
logger.error(">>>>>>>>>>> xxl-job, JobScheduleHelper#ringThread error:{}", e);
}
}
}
logger.info(">>>>>>>>>>> xxl-job, JobScheduleHelper#ringThread stop");
}
});
ringThread.setDaemon(true);
ringThread.setName("xxl-job, admin JobScheduleHelper#ringThread");
ringThread.start();
}
private void refreshNextValidTime(XxlJobInfo jobInfo, Date fromTime) throws Exception {
try {
Date nextValidTime = generateNextValidTime(jobInfo, fromTime);
if (nextValidTime != null) {
jobInfo.setTriggerLastTime(jobInfo.getTriggerNextTime());
jobInfo.setTriggerNextTime(nextValidTime.getTime());
} else {
// generateNextValidTime fail, stop job
jobInfo.setTriggerStatus(0);
jobInfo.setTriggerLastTime(0);
jobInfo.setTriggerNextTime(0);
logger.error(">>>>>>>>>>> xxl-job, refreshNextValidTime fail for job: jobId={}, scheduleType={}, scheduleConf={}",
jobInfo.getId(), jobInfo.getScheduleType(), jobInfo.getScheduleConf());
}
} catch (Exception e) {
// generateNextValidTime error, stop job
jobInfo.setTriggerStatus(0);
jobInfo.setTriggerLastTime(0);
jobInfo.setTriggerNextTime(0);
logger.error(">>>>>>>>>>> xxl-job, refreshNextValidTime error for job: jobId={}, scheduleType={}, scheduleConf={}",
jobInfo.getId(), jobInfo.getScheduleType(), jobInfo.getScheduleConf(), e);
}
}
private void pushTimeRing(int ringSecond, int jobId){
// push async ring
List<Integer> ringItemData = ringData.get(ringSecond);
if (ringItemData == null) {
ringItemData = new ArrayList<Integer>();
ringData.put(ringSecond, ringItemData);
}
ringItemData.add(jobId);
logger.debug(">>>>>>>>>>> xxl-job, schedule push time-ring : " + ringSecond + " = " + Arrays.asList(ringItemData) );
}
public void toStop(){
// 1、stop schedule
scheduleThreadToStop = true;
try {
// wait
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
if (scheduleThread.getState() != Thread.State.TERMINATED){
// interrupt and wait
scheduleThread.interrupt();
try {
scheduleThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
// if has ring data
boolean hasRingData = false;
if (!ringData.isEmpty()) {
for (int second : ringData.keySet()) {
List<Integer> tmpData = ringData.get(second);
if (tmpData!=null && tmpData.size()>0) {
hasRingData = true;
break;
}
}
}
if (hasRingData) {
try {
TimeUnit.SECONDS.sleep(8);
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
// stop ring (wait job-in-memory stop)
ringThreadToStop = true;
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
if (ringThread.getState() != Thread.State.TERMINATED){
// interrupt and wait
ringThread.interrupt();
try {
ringThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
logger.info(">>>>>>>>>>> xxl-job, JobScheduleHelper stop");
}
// ---------------------- tools ----------------------
public static Date generateNextValidTime(XxlJobInfo jobInfo, Date fromTime) throws Exception {
ScheduleTypeEnum scheduleTypeEnum = ScheduleTypeEnum.match(jobInfo.getScheduleType(), null);
if (ScheduleTypeEnum.CRON == scheduleTypeEnum) {
Date nextValidTime = new CronExpression(jobInfo.getScheduleConf()).getNextValidTimeAfter(fromTime);
return nextValidTime;
/*|| ScheduleTypeEnum.FIX_DELAY == scheduleTypeEnum*/
} else if (ScheduleTypeEnum.FIX_RATE == scheduleTypeEnum ) {
return new Date(fromTime.getTime() + Integer.valueOf(jobInfo.getScheduleConf())*1000 );
}
return null;
}
}
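A compressed sketch of the second-level time ring the removed scheduler used (job ids and timings below are hypothetical): near-term triggers are bucketed by the second-of-minute they should fire, and the ring is drained once per second, also checking the previous tick.

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Hypothetical sketch of a second-level time ring for near-term triggers.
public class TimeRingSketch {
    private static final ConcurrentMap<Integer, List<Integer>> RING = new ConcurrentHashMap<>();

    static void push(long triggerTimeMillis, int jobId) {
        int ringSecond = (int) ((triggerTimeMillis / 1000) % 60);   // bucket by second-of-minute
        RING.computeIfAbsent(ringSecond, s -> new ArrayList<>()).add(jobId);
    }

    static List<Integer> drain(int nowSecond) {
        List<Integer> due = new ArrayList<>();
        for (int i = 0; i < 2; i++) {                               // current tick plus the previous one
            List<Integer> bucket = RING.remove((nowSecond + 60 - i) % 60);
            if (bucket != null) {
                due.addAll(bucket);
            }
        }
        return due;
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        push(now, 101);
        push(now + 1000, 102);
        int nowSecond = Calendar.getInstance().get(Calendar.SECOND);
        System.out.println("due now: " + drain(nowSecond));
    }
}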

View File

@ -1,152 +0,0 @@
package com.xxl.job.admin.core.thread;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.trigger.TriggerTypeEnum;
import com.xxl.job.admin.core.trigger.XxlJobTrigger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
* job trigger thread pool helper
*
* @author xuxueli 2018-07-03 21:08:07
*/
public class JobTriggerPoolHelper {
private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class);
// ---------------------- trigger pool ----------------------
// fast/slow thread pool
private ThreadPoolExecutor fastTriggerPool = null;
private ThreadPoolExecutor slowTriggerPool = null;
public void start(){
fastTriggerPool = new ThreadPoolExecutor(
10,
XxlJobAdminConfig.getAdminConfig().getTriggerPoolFastMax(),
60L,
TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>(1000),
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "xxl-job, admin JobTriggerPoolHelper-fastTriggerPool-" + r.hashCode());
}
});
slowTriggerPool = new ThreadPoolExecutor(
10,
XxlJobAdminConfig.getAdminConfig().getTriggerPoolSlowMax(),
60L,
TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>(2000),
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "xxl-job, admin JobTriggerPoolHelper-slowTriggerPool-" + r.hashCode());
}
});
}
public void stop() {
//triggerPool.shutdown();
fastTriggerPool.shutdownNow();
slowTriggerPool.shutdownNow();
logger.info(">>>>>>>>> xxl-job trigger thread pool shutdown success.");
}
// job timeout count
// ms > min
private final ConcurrentMap<Integer, AtomicInteger> jobTimeoutCountMap = new ConcurrentHashMap<>();
private volatile long minTim = System.currentTimeMillis()/60000;
/**
* add trigger
*/
public void addTrigger(final int jobId,
final TriggerTypeEnum triggerType,
final int failRetryCount,
final String executorShardingParam,
final String executorParam,
final String addressList) {
// choose thread pool
ThreadPoolExecutor triggerPool = fastTriggerPool;
AtomicInteger jobTimeoutCount = jobTimeoutCountMap.get(jobId);
// job-timeout 10 times in 1 min
if (jobTimeoutCount!=null && jobTimeoutCount.get() > 10) {
triggerPool = slowTriggerPool;
}
// trigger
triggerPool.execute(new Runnable() {
@Override
public void run() {
long start = System.currentTimeMillis();
try {
// do trigger
XxlJobTrigger.trigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam, addressList);
} catch (Exception e) {
logger.error(e.getMessage(), e);
} finally {
// check timeout-count-map
long minTimNow = System.currentTimeMillis()/60000;
if (minTim != minTimNow) {
minTim = minTimNow;
jobTimeoutCountMap.clear();
}
// incr timeout-count-map
long cost = System.currentTimeMillis()-start;
if (cost > 500) { // job-timeout threshold 500ms
AtomicInteger timeoutCount = jobTimeoutCountMap.putIfAbsent(jobId, new AtomicInteger(1));
if (timeoutCount != null) {
timeoutCount.incrementAndGet();
}
}
}
}
});
}
// ---------------------- helper ----------------------
private static JobTriggerPoolHelper helper = new JobTriggerPoolHelper();
public static void toStart() {
helper.start();
}
public static void toStop() {
helper.stop();
}
/**
* @param jobId
* @param triggerType
* @param failRetryCount
* >=0: use this param
* <0: use param from job info config
* @param executorShardingParam
* @param executorParam
* null: use job param
* not null: cover job param
*/
public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam, String addressList) {
helper.addTrigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam, addressList);
}
}
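A minimal sketch (thresholds, pool sizes and counters below are illustrative, in-memory only) of the fast/slow pool selection the removed helper performed: jobs that repeatedly run slow within the current window are pushed onto an isolated slow pool so they cannot starve fast jobs.

import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical sketch: route repeatedly slow jobs to an isolated pool.
public class TriggerPoolSketch {
    private final ExecutorService fastPool = Executors.newFixedThreadPool(4);
    private final ExecutorService slowPool = Executors.newFixedThreadPool(2);
    private final ConcurrentMap<Integer, AtomicInteger> timeoutCount = new ConcurrentHashMap<>();

    void trigger(int jobId, Runnable job) {
        // more than 10 slow runs observed -> use the slow pool
        AtomicInteger timeouts = timeoutCount.get(jobId);
        ExecutorService pool = (timeouts != null && timeouts.get() > 10) ? slowPool : fastPool;
        pool.execute(() -> {
            long start = System.currentTimeMillis();
            try {
                job.run();
            } finally {
                if (System.currentTimeMillis() - start > 500) {      // 500 ms counts as a slow run
                    timeoutCount.computeIfAbsent(jobId, id -> new AtomicInteger()).incrementAndGet();
                }
            }
        });
    }

    public static void main(String[] args) throws InterruptedException {
        TriggerPoolSketch sketch = new TriggerPoolSketch();
        sketch.trigger(1, () -> System.out.println("job 1 ran"));
        Thread.sleep(200);
        sketch.fastPool.shutdown();
        sketch.slowPool.shutdown();
    }
}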

View File

@ -1,27 +0,0 @@
package com.xxl.job.admin.core.trigger;
import com.xxl.job.admin.core.util.I18nUtil;
/**
* trigger type enum
*
* @author xuxueli 2018-09-16 04:56:41
*/
public enum TriggerTypeEnum {
MANUAL(I18nUtil.getString("jobconf_trigger_type_manual")),
CRON(I18nUtil.getString("jobconf_trigger_type_cron")),
RETRY(I18nUtil.getString("jobconf_trigger_type_retry")),
PARENT(I18nUtil.getString("jobconf_trigger_type_parent")),
API(I18nUtil.getString("jobconf_trigger_type_api")),
MISFIRE(I18nUtil.getString("jobconf_trigger_type_misfire"));
private TriggerTypeEnum(String title){
this.title = title;
}
private String title;
public String getTitle() {
return title;
}
}

View File

@ -1,228 +0,0 @@
package com.xxl.job.admin.core.trigger;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog;
import com.xxl.job.admin.core.route.ExecutorRouteStrategyEnum;
import com.xxl.job.admin.core.scheduler.XxlJobScheduler;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.core.biz.ExecutorBiz;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.biz.model.TriggerParam;
import com.xxl.job.core.enums.ExecutorBlockStrategyEnum;
import com.xxl.job.core.util.IpUtil;
import com.xxl.job.core.util.ThrowableUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
/**
* xxl-job trigger
* Created by xuxueli on 17/7/13.
*/
public class XxlJobTrigger {
private static Logger logger = LoggerFactory.getLogger(XxlJobTrigger.class);
/**
* trigger job
*
* @param jobId
* @param triggerType
* @param failRetryCount
* >=0: use this param
* <0: use param from job info config
* @param executorShardingParam
* @param executorParam
* null: use job param
* not null: cover job param
* @param addressList
* null: use executor addressList
* not null: cover
*/
public static void trigger(int jobId,
TriggerTypeEnum triggerType,
int failRetryCount,
String executorShardingParam,
String executorParam,
String addressList) {
// load data
XxlJobInfo jobInfo = XxlJobAdminConfig.getAdminConfig().getXxlJobInfoDao().loadById(jobId);
if (jobInfo == null) {
logger.warn(">>>>>>>>>>>> trigger fail, jobId invalidjobId={}", jobId);
return;
}
if (executorParam != null) {
jobInfo.setExecutorParam(executorParam);
}
int finalFailRetryCount = failRetryCount>=0?failRetryCount:jobInfo.getExecutorFailRetryCount();
XxlJobGroup group = XxlJobAdminConfig.getAdminConfig().getXxlJobGroupDao().load(jobInfo.getJobGroup());
// cover addressList
if (addressList!=null && addressList.trim().length()>0) {
group.setAddressType(1);
group.setAddressList(addressList.trim());
}
// sharding param
int[] shardingParam = null;
if (executorShardingParam!=null){
String[] shardingArr = executorShardingParam.split("/");
if (shardingArr.length==2 && isNumeric(shardingArr[0]) && isNumeric(shardingArr[1])) {
shardingParam = new int[2];
shardingParam[0] = Integer.valueOf(shardingArr[0]);
shardingParam[1] = Integer.valueOf(shardingArr[1]);
}
}
if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST==ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null)
&& group.getRegistryList()!=null && !group.getRegistryList().isEmpty()
&& shardingParam==null) {
for (int i = 0; i < group.getRegistryList().size(); i++) {
processTrigger(group, jobInfo, finalFailRetryCount, triggerType, i, group.getRegistryList().size());
}
} else {
if (shardingParam == null) {
shardingParam = new int[]{0, 1};
}
processTrigger(group, jobInfo, finalFailRetryCount, triggerType, shardingParam[0], shardingParam[1]);
}
}
private static boolean isNumeric(String str){
try {
int result = Integer.valueOf(str);
return true;
} catch (NumberFormatException e) {
return false;
}
}
/**
* @param group job group, registry list may be empty
* @param jobInfo
* @param finalFailRetryCount
* @param triggerType
* @param index sharding index
* @param total sharding total
*/
private static void processTrigger(XxlJobGroup group, XxlJobInfo jobInfo, int finalFailRetryCount, TriggerTypeEnum triggerType, int index, int total){
// param
// block strategy
ExecutorBlockStrategyEnum blockStrategy = ExecutorBlockStrategyEnum.match(jobInfo.getExecutorBlockStrategy(), ExecutorBlockStrategyEnum.SERIAL_EXECUTION);
// route strategy
ExecutorRouteStrategyEnum executorRouteStrategyEnum = ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null);
String shardingParam = (ExecutorRouteStrategyEnum.SHARDING_BROADCAST==executorRouteStrategyEnum)?String.valueOf(index).concat("/").concat(String.valueOf(total)):null;
// 1、save log-id
XxlJobLog jobLog = new XxlJobLog();
jobLog.setJobGroup(jobInfo.getJobGroup());
jobLog.setJobId(jobInfo.getId());
jobLog.setTriggerTime(new Date());
XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().save(jobLog);
logger.debug(">>>>>>>>>>> xxl-job trigger start, jobId:{}", jobLog.getId());
// 2、init trigger-param
TriggerParam triggerParam = new TriggerParam();
triggerParam.setJobId(jobInfo.getId());
triggerParam.setExecutorHandler(jobInfo.getExecutorHandler());
triggerParam.setExecutorParams(jobInfo.getExecutorParam());
triggerParam.setExecutorBlockStrategy(jobInfo.getExecutorBlockStrategy());
triggerParam.setExecutorTimeout(jobInfo.getExecutorTimeout());
triggerParam.setLogId(jobLog.getId());
triggerParam.setLogDateTime(jobLog.getTriggerTime().getTime());
triggerParam.setGlueType(jobInfo.getGlueType());
triggerParam.setGlueSource(jobInfo.getGlueSource());
triggerParam.setGlueUpdatetime(jobInfo.getGlueUpdatetime().getTime());
triggerParam.setBroadcastIndex(index);
triggerParam.setBroadcastTotal(total);
// 3、init address
String address = null;
ReturnT<String> routeAddressResult = null;
if (group.getRegistryList()!=null && !group.getRegistryList().isEmpty()) {
if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) {
if (index < group.getRegistryList().size()) {
address = group.getRegistryList().get(index);
} else {
address = group.getRegistryList().get(0);
}
} else {
routeAddressResult = executorRouteStrategyEnum.getRouter().route(triggerParam, group.getRegistryList());
if (routeAddressResult.getCode() == ReturnT.SUCCESS_CODE) {
address = routeAddressResult.getContent();
}
}
} else {
routeAddressResult = new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("jobconf_trigger_address_empty"));
}
// 4、trigger remote executor
ReturnT<String> triggerResult = null;
if (address != null) {
triggerResult = runExecutor(triggerParam, address);
} else {
triggerResult = new ReturnT<String>(ReturnT.FAIL_CODE, null);
}
// 5、collection trigger info
StringBuffer triggerMsgSb = new StringBuffer();
triggerMsgSb.append(I18nUtil.getString("jobconf_trigger_type")).append("").append(triggerType.getTitle());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_admin_adress")).append("").append(IpUtil.getIp());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regtype")).append("")
.append( (group.getAddressType() == 0)?I18nUtil.getString("jobgroup_field_addressType_0"):I18nUtil.getString("jobgroup_field_addressType_1") );
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regaddress")).append("").append(group.getRegistryList());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorRouteStrategy")).append("").append(executorRouteStrategyEnum.getTitle());
if (shardingParam != null) {
triggerMsgSb.append("("+shardingParam+")");
}
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorBlockStrategy")).append("").append(blockStrategy.getTitle());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_timeout")).append("").append(jobInfo.getExecutorTimeout());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorFailRetryCount")).append("").append(finalFailRetryCount);
triggerMsgSb.append("<br><br><span style=\"color:#00c0ef;\" > >>>>>>>>>>>"+ I18nUtil.getString("jobconf_trigger_run") +"<<<<<<<<<<< </span><br>")
.append((routeAddressResult!=null&&routeAddressResult.getMsg()!=null)?routeAddressResult.getMsg()+"<br><br>":"").append(triggerResult.getMsg()!=null?triggerResult.getMsg():"");
// 6、save log trigger-info
jobLog.setExecutorAddress(address);
jobLog.setExecutorHandler(jobInfo.getExecutorHandler());
jobLog.setExecutorParam(jobInfo.getExecutorParam());
jobLog.setExecutorShardingParam(shardingParam);
jobLog.setExecutorFailRetryCount(finalFailRetryCount);
//jobLog.setTriggerTime();
jobLog.setTriggerCode(triggerResult.getCode());
jobLog.setTriggerMsg(triggerMsgSb.toString());
XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().updateTriggerInfo(jobLog);
logger.debug(">>>>>>>>>>> xxl-job trigger end, jobId:{}", jobLog.getId());
}
/**
* run executor
* @param triggerParam
* @param address
* @return
*/
public static ReturnT<String> runExecutor(TriggerParam triggerParam, String address){
ReturnT<String> runResult = null;
try {
ExecutorBiz executorBiz = XxlJobScheduler.getExecutorBiz(address);
runResult = executorBiz.run(triggerParam);
} catch (Exception e) {
logger.error(">>>>>>>>>>> xxl-job trigger error, please check if the executor[{}] is running.", address, e);
runResult = new ReturnT<String>(ReturnT.FAIL_CODE, ThrowableUtil.toString(e));
}
StringBuffer runResultSB = new StringBuffer(I18nUtil.getString("jobconf_trigger_run") + "：");
runResultSB.append("<br>address：").append(address);
runResultSB.append("<br>code：").append(runResult.getCode());
runResultSB.append("<br>msg：").append(runResult.getMsg());
runResult.setMsg(runResultSB.toString());
return runResult;
}
}

View File

@ -1,98 +0,0 @@
package com.xxl.job.admin.core.util;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Cookie.Util
*
* @author xuxueli 2015-12-12 18:01:06
*/
public class CookieUtil {
// default cookie max-age, in seconds (effectively permanent)
private static final int COOKIE_MAX_AGE = Integer.MAX_VALUE;
// cookie path: root path
private static final String COOKIE_PATH = "/";
/**
* Save cookie
*
* @param response
* @param key
* @param value
* @param ifRemember
*/
public static void set(HttpServletResponse response, String key, String value, boolean ifRemember) {
int age = ifRemember?COOKIE_MAX_AGE:-1;
set(response, key, value, null, COOKIE_PATH, age, true);
}
/**
* Save cookie (with domain, path, max-age, httpOnly)
*
* @param response
* @param key
* @param value
* @param maxAge
*/
private static void set(HttpServletResponse response, String key, String value, String domain, String path, int maxAge, boolean isHttpOnly) {
Cookie cookie = new Cookie(key, value);
if (domain != null) {
cookie.setDomain(domain);
}
cookie.setPath(path);
cookie.setMaxAge(maxAge);
cookie.setHttpOnly(isHttpOnly);
response.addCookie(cookie);
}
/**
* Get cookie value
*
* @param request
* @param key
* @return
*/
public static String getValue(HttpServletRequest request, String key) {
Cookie cookie = get(request, key);
if (cookie != null) {
return cookie.getValue();
}
return null;
}
/**
* Get cookie by name
*
* @param request
* @param key
*/
private static Cookie get(HttpServletRequest request, String key) {
Cookie[] arr_cookie = request.getCookies();
if (arr_cookie != null && arr_cookie.length > 0) {
for (Cookie cookie : arr_cookie) {
if (cookie.getName().equals(key)) {
return cookie;
}
}
}
return null;
}
/**
* Remove cookie
*
* @param request
* @param response
* @param key
*/
public static void remove(HttpServletRequest request, HttpServletResponse response, String key) {
Cookie cookie = get(request, key);
if (cookie != null) {
set(response, key, "", null, COOKIE_PATH, 0, true);
}
}
}

View File

@ -1,31 +0,0 @@
package com.xxl.job.admin.core.util;
import freemarker.ext.beans.BeansWrapper;
import freemarker.ext.beans.BeansWrapperBuilder;
import freemarker.template.Configuration;
import freemarker.template.TemplateHashModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ftl util
*
* @author xuxueli 2018-01-17 20:37:48
*/
public class FtlUtil {
private static Logger logger = LoggerFactory.getLogger(FtlUtil.class);
//BeansWrapper.getDefaultInstance();
private static BeansWrapper wrapper = new BeansWrapperBuilder(Configuration.DEFAULT_INCOMPATIBLE_IMPROVEMENTS).build();
public static TemplateHashModel generateStaticModel(String packageName) {
try {
TemplateHashModel staticModels = wrapper.getStaticModels();
TemplateHashModel fileStatics = (TemplateHashModel) staticModels.get(packageName);
return fileStatics;
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
return null;
}
}

View File

@ -1,79 +0,0 @@
package com.xxl.job.admin.core.util;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* i18n util
*
* @author xuxueli 2018-01-17 20:39:06
*/
public class I18nUtil {
private static Logger logger = LoggerFactory.getLogger(I18nUtil.class);
private static Properties prop = null;
public static Properties loadI18nProp(){
if (prop != null) {
return prop;
}
try {
// build i18n prop
String i18n = XxlJobAdminConfig.getAdminConfig().getI18n();
String i18nFile = MessageFormat.format("i18n/message_{0}.properties", i18n);
// load prop
Resource resource = new ClassPathResource(i18nFile);
EncodedResource encodedResource = new EncodedResource(resource,"UTF-8");
prop = PropertiesLoaderUtils.loadProperties(encodedResource);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return prop;
}
/**
* get val of i18n key
*
* @param key
* @return
*/
public static String getString(String key) {
return loadI18nProp().getProperty(key);
}
/**
* get values of multiple i18n keys, as json
*
* @param keys
* @return
*/
public static String getMultString(String... keys) {
Map<String, String> map = new HashMap<String, String>();
Properties prop = loadI18nProp();
if (keys!=null && keys.length>0) {
for (String key: keys) {
map.put(key, prop.getProperty(key));
}
} else {
for (String key: prop.stringPropertyNames()) {
map.put(key, prop.getProperty(key));
}
}
String json = JacksonUtil.writeValueAsString(map);
return json;
}
}

View File

@ -1,92 +0,0 @@
package com.xxl.job.admin.core.util;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
* Jackson util
*
* 1、obj need private and set/get
* 2、do not support inner class
*
* @author xuxueli 2015-9-25 18:02:56
*/
public class JacksonUtil {
private static Logger logger = LoggerFactory.getLogger(JacksonUtil.class);
private final static ObjectMapper objectMapper = new ObjectMapper();
public static ObjectMapper getInstance() {
return objectMapper;
}
/**
* bean、array、List、Map --> json
*
* @param obj
* @return json string
* @throws Exception
*/
public static String writeValueAsString(Object obj) {
try {
return getInstance().writeValueAsString(obj);
} catch (JsonGenerationException e) {
logger.error(e.getMessage(), e);
} catch (JsonMappingException e) {
logger.error(e.getMessage(), e);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return null;
}
/**
* string --> bean、Map、List(array)
*
* @param jsonStr
* @param clazz
* @return obj
* @throws Exception
*/
public static <T> T readValue(String jsonStr, Class<T> clazz) {
try {
return getInstance().readValue(jsonStr, clazz);
} catch (JsonParseException e) {
logger.error(e.getMessage(), e);
} catch (JsonMappingException e) {
logger.error(e.getMessage(), e);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return null;
}
/**
* string --> List<Bean>...
*
* @param jsonStr
* @param parametrized
* @param parameterClasses
* @param <T>
* @return
*/
public static <T> T readValue(String jsonStr, Class<?> parametrized, Class<?>... parameterClasses) {
try {
JavaType javaType = getInstance().getTypeFactory().constructParametricType(parametrized, parameterClasses);
return getInstance().readValue(jsonStr, javaType);
} catch (JsonParseException e) {
logger.error(e.getMessage(), e);
} catch (JsonMappingException e) {
logger.error(e.getMessage(), e);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return null;
}
}

View File

@ -1,133 +0,0 @@
package com.xxl.job.admin.core.util;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* local cache tool
*
* @author xuxueli 2018-01-22 21:37:34
*/
public class LocalCacheUtil {
// prefer an abstract parent type for the value, for better compatibility
private static final ConcurrentMap<String, LocalCacheData> cacheRepository = new ConcurrentHashMap<String, LocalCacheData>();
private static class LocalCacheData{
private String key;
private Object val;
private long timeoutTime;
public LocalCacheData() {
}
public LocalCacheData(String key, Object val, long timeoutTime) {
this.key = key;
this.val = val;
this.timeoutTime = timeoutTime;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public Object getVal() {
return val;
}
public void setVal(Object val) {
this.val = val;
}
public long getTimeoutTime() {
return timeoutTime;
}
public void setTimeoutTime(long timeoutTime) {
this.timeoutTime = timeoutTime;
}
}
/**
* set cache
*
* @param key
* @param val
* @param cacheTime
* @return
*/
public static boolean set(String key, Object val, long cacheTime){
// clean timeout cache, before set new cache (avoid cache too much)
cleanTimeoutCache();
// set new cache
if (key==null || key.trim().length()==0) {
return false;
}
if (val == null) {
remove(key);
}
if (cacheTime <= 0) {
remove(key);
}
long timeoutTime = System.currentTimeMillis() + cacheTime;
LocalCacheData localCacheData = new LocalCacheData(key, val, timeoutTime);
cacheRepository.put(localCacheData.getKey(), localCacheData);
return true;
}
/**
* remove cache
*
* @param key
* @return
*/
public static boolean remove(String key){
if (key==null || key.trim().length()==0) {
return false;
}
cacheRepository.remove(key);
return true;
}
/**
* get cache
*
* @param key
* @return
*/
public static Object get(String key){
if (key==null || key.trim().length()==0) {
return null;
}
LocalCacheData localCacheData = cacheRepository.get(key);
if (localCacheData!=null && System.currentTimeMillis()<localCacheData.getTimeoutTime()) {
return localCacheData.getVal();
} else {
remove(key);
return null;
}
}
/**
* clean timeout cache
*
* @return
*/
public static boolean cleanTimeoutCache(){
if (!cacheRepository.keySet().isEmpty()) {
for (String key: cacheRepository.keySet()) {
LocalCacheData localCacheData = cacheRepository.get(key);
if (localCacheData!=null && System.currentTimeMillis()>=localCacheData.getTimeoutTime()) {
cacheRepository.remove(key);
}
}
}
return true;
}
}
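A short usage sketch of the removed cache helper above (key, value and TTL are illustrative):

import com.xxl.job.admin.core.util.LocalCacheUtil;

// Usage sketch for the local TTL cache shown above.
public class LocalCacheDemo {
    public static void main(String[] args) throws InterruptedException {
        LocalCacheUtil.set("token", "abc123", 2000);        // cache for 2 seconds
        System.out.println(LocalCacheUtil.get("token"));    // abc123
        Thread.sleep(2500);
        System.out.println(LocalCacheUtil.get("token"));    // null, entry expired
    }
}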

View File

@ -1,37 +0,0 @@
package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.model.XxlJobGroup;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* Created by xuxueli on 16/9/30.
*/
@Mapper
public interface XxlJobGroupDao {
public List<XxlJobGroup> findAll();
public List<XxlJobGroup> findByAddressType(@Param("addressType") int addressType);
public int save(XxlJobGroup xxlJobGroup);
public int update(XxlJobGroup xxlJobGroup);
public int remove(@Param("id") int id);
public XxlJobGroup load(@Param("id") int id);
public List<XxlJobGroup> pageList(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("appname") String appname,
@Param("title") String title);
public int pageListCount(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("appname") String appname,
@Param("title") String title);
}

View File

@ -1,49 +0,0 @@
package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.model.XxlJobInfo;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* job info
* @author xuxueli 2016-1-12 18:03:45
*/
@Mapper
public interface XxlJobInfoDao {
public List<XxlJobInfo> pageList(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("jobGroup") int jobGroup,
@Param("triggerStatus") int triggerStatus,
@Param("jobDesc") String jobDesc,
@Param("executorHandler") String executorHandler,
@Param("author") String author);
public int pageListCount(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("jobGroup") int jobGroup,
@Param("triggerStatus") int triggerStatus,
@Param("jobDesc") String jobDesc,
@Param("executorHandler") String executorHandler,
@Param("author") String author);
public int save(XxlJobInfo info);
public XxlJobInfo loadById(@Param("id") int id);
public int update(XxlJobInfo xxlJobInfo);
public int delete(@Param("id") long id);
public List<XxlJobInfo> getJobsByGroup(@Param("jobGroup") int jobGroup);
public int findAllCount();
public List<XxlJobInfo> scheduleJobQuery(@Param("maxNextTime") long maxNextTime, @Param("pagesize") int pagesize );
public int scheduleUpdate(XxlJobInfo xxlJobInfo);
}

View File

@ -1,62 +0,0 @@
package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.model.XxlJobLog;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* job log
* @author xuxueli 2016-1-12 18:03:06
*/
@Mapper
public interface XxlJobLogDao {
// if jobId is present, jobGroup is ignored; otherwise filter by jobGroup
public List<XxlJobLog> pageList(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("jobGroup") int jobGroup,
@Param("jobId") int jobId,
@Param("triggerTimeStart") Date triggerTimeStart,
@Param("triggerTimeEnd") Date triggerTimeEnd,
@Param("logStatus") int logStatus);
public int pageListCount(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("jobGroup") int jobGroup,
@Param("jobId") int jobId,
@Param("triggerTimeStart") Date triggerTimeStart,
@Param("triggerTimeEnd") Date triggerTimeEnd,
@Param("logStatus") int logStatus);
public XxlJobLog load(@Param("id") long id);
public long save(XxlJobLog xxlJobLog);
public int updateTriggerInfo(XxlJobLog xxlJobLog);
public int updateHandleInfo(XxlJobLog xxlJobLog);
public int delete(@Param("jobId") int jobId);
public Map<String, Object> findLogReport(@Param("from") Date from,
@Param("to") Date to);
public List<Long> findClearLogIds(@Param("jobGroup") int jobGroup,
@Param("jobId") int jobId,
@Param("clearBeforeTime") Date clearBeforeTime,
@Param("clearBeforeNum") int clearBeforeNum,
@Param("pagesize") int pagesize);
public int clearLog(@Param("logIds") List<Long> logIds);
public List<Long> findFailJobLogIds(@Param("pagesize") int pagesize);
public int updateAlarmStatus(@Param("logId") long logId,
@Param("oldAlarmStatus") int oldAlarmStatus,
@Param("newAlarmStatus") int newAlarmStatus);
public List<Long> findLostJobIds(@Param("losedTime") Date losedTime);
}
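findClearLogIds and clearLog are meant to be called in batches until no ids remain. A hedged sketch of that retention loop (the 30-day cutoff, the batch size of 1000 and the jobGroup/jobId variables are placeholders):

// delete expired logs in batches; DateUtil.addDays is the helper already used elsewhere in this module
Date clearBeforeTime = DateUtil.addDays(new Date(), -30);
List<Long> logIds;
do {
    logIds = xxlJobLogDao.findClearLogIds(jobGroup, jobId, clearBeforeTime, 0, 1000);
    if (logIds != null && !logIds.isEmpty()) {
        xxlJobLogDao.clearLog(logIds);
    }
} while (logIds != null && !logIds.isEmpty());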

View File

@ -1,24 +0,0 @@
package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.model.XxlJobLogGlue;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* job log for glue
* @author xuxueli 2016-5-19 18:04:56
*/
@Mapper
public interface XxlJobLogGlueDao {
public int save(XxlJobLogGlue xxlJobLogGlue);
public List<XxlJobLogGlue> findByJobId(@Param("jobId") int jobId);
public int removeOld(@Param("jobId") int jobId, @Param("limit") int limit);
public int deleteByJobId(@Param("jobId") int jobId);
}

View File

@ -1,26 +0,0 @@
package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.model.XxlJobLogReport;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
/**
* job log
* @author xuxueli 2019-11-22
*/
@Mapper
public interface XxlJobLogReportDao {
public int save(XxlJobLogReport xxlJobLogReport);
public int update(XxlJobLogReport xxlJobLogReport);
public List<XxlJobLogReport> queryLogReport(@Param("triggerDayFrom") Date triggerDayFrom,
@Param("triggerDayTo") Date triggerDayTo);
public XxlJobLogReport queryLogReportTotal();
}

View File

@ -1,38 +0,0 @@
package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.model.XxlJobRegistry;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
/**
* Created by xuxueli on 16/9/30.
*/
@Mapper
public interface XxlJobRegistryDao {
public List<Integer> findDead(@Param("timeout") int timeout,
@Param("nowTime") Date nowTime);
public int removeDead(@Param("ids") List<Integer> ids);
public List<XxlJobRegistry> findAll(@Param("timeout") int timeout,
@Param("nowTime") Date nowTime);
public int registryUpdate(@Param("registryGroup") String registryGroup,
@Param("registryKey") String registryKey,
@Param("registryValue") String registryValue,
@Param("updateTime") Date updateTime);
public int registrySave(@Param("registryGroup") String registryGroup,
@Param("registryKey") String registryKey,
@Param("registryValue") String registryValue,
@Param("updateTime") Date updateTime);
public int registryDelete(@Param("registryGroup") String registryGroup,
@Param("registryKey") String registryKey,
@Param("registryValue") String registryValue);
}

View File

@ -1,31 +0,0 @@
package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.model.XxlJobUser;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* @author xuxueli 2019-05-04 16:44:59
*/
@Mapper
public interface XxlJobUserDao {
public List<XxlJobUser> pageList(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("username") String username,
@Param("role") int role);
public int pageListCount(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("username") String username,
@Param("role") int role);
public XxlJobUser loadByUserName(@Param("username") String username);
public int save(XxlJobUser xxlJobUser);
public int update(XxlJobUser xxlJobUser);
public int delete(@Param("id") int id);
}

View File

@ -1,98 +0,0 @@
package com.xxl.job.admin.service;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobUser;
import com.xxl.job.core.biz.model.ReturnT;
import java.util.Date;
import java.util.Map;
/**
* core job action for xxl-job
*
* @author xuxueli 2016-5-28 15:30:33
*/
public interface XxlJobService {
/**
* page list
*
* @param start
* @param length
* @param jobGroup
* @param jobDesc
* @param executorHandler
* @param author
* @return
*/
public Map<String, Object> pageList(int start, int length, int jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author);
/**
* add job
*
* @param jobInfo
* @return
*/
public ReturnT<String> add(XxlJobInfo jobInfo, XxlJobUser loginUser);
/**
* update job
*
* @param jobInfo
* @return
*/
public ReturnT<String> update(XxlJobInfo jobInfo, XxlJobUser loginUser);
/**
* remove job
*
* @param id
* @return
*/
public ReturnT<String> remove(int id);
/**
* start job
*
* @param id
* @return
*/
public ReturnT<String> start(int id);
/**
* stop job
*
* @param id
* @return
*/
public ReturnT<String> stop(int id);
/**
* trigger
*
* @param loginUser
* @param jobId
* @param executorParam
* @param addressList
* @return
*/
public ReturnT<String> trigger(XxlJobUser loginUser, int jobId, String executorParam, String addressList);
/**
* dashboard info
*
* @return
*/
public Map<String,Object> dashboardInfo();
/**
* chart info
*
* @param startDate
* @param endDate
* @return
*/
public ReturnT<Map<String,Object>> chartInfo(Date startDate, Date endDate);
}
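A hedged usage sketch of this interface from a hypothetical caller (the field injection and method names below are illustrative only):

// hypothetical caller wiring the service methods declared above
@Resource
private XxlJobService xxlJobService;

public ReturnT<String> enableJob(int jobId) {
    return xxlJobService.start(jobId);
}

public ReturnT<String> runOnce(XxlJobUser loginUser, int jobId) {
    // a null addressList lets the executor registry decide; the executor param is passed through as-is
    return xxlJobService.trigger(loginUser, jobId, "", null);
}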

View File

@ -1,35 +0,0 @@
package com.xxl.job.admin.service.impl;
import com.xxl.job.admin.core.thread.JobCompleteHelper;
import com.xxl.job.admin.core.thread.JobRegistryHelper;
import com.xxl.job.core.biz.AdminBiz;
import com.xxl.job.core.biz.model.HandleCallbackParam;
import com.xxl.job.core.biz.model.RegistryParam;
import com.xxl.job.core.biz.model.ReturnT;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* @author xuxueli 2017-07-27 21:54:20
*/
@Service
public class AdminBizImpl implements AdminBiz {
@Override
public ReturnT<String> callback(List<HandleCallbackParam> callbackParamList) {
return JobCompleteHelper.getInstance().callback(callbackParamList);
}
@Override
public ReturnT<String> registry(RegistryParam registryParam) {
return JobRegistryHelper.getInstance().registry(registryParam);
}
@Override
public ReturnT<String> registryRemove(RegistryParam registryParam) {
return JobRegistryHelper.getInstance().registryRemove(registryParam);
}
}

View File

@ -1,112 +0,0 @@
package com.xxl.job.admin.service.impl;
import com.xxl.job.admin.core.model.XxlJobUser;
import com.xxl.job.admin.core.util.CookieUtil;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.core.util.JacksonUtil;
import com.xxl.job.admin.dao.XxlJobUserDao;
import com.xxl.job.core.biz.model.ReturnT;
import org.springframework.stereotype.Service;
import org.springframework.util.DigestUtils;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.math.BigInteger;
/**
* @author xuxueli 2019-05-04 22:13:264
*/
@Service
public class LoginService {
public static final String LOGIN_IDENTITY_KEY = "XXL_JOB_LOGIN_IDENTITY";
@Resource
private XxlJobUserDao xxlJobUserDao;
// ---------------------- token tool ----------------------
private String makeToken(XxlJobUser xxlJobUser){
String tokenJson = JacksonUtil.writeValueAsString(xxlJobUser);
String tokenHex = new BigInteger(tokenJson.getBytes()).toString(16);
return tokenHex;
}
private XxlJobUser parseToken(String tokenHex){
XxlJobUser xxlJobUser = null;
if (tokenHex != null) {
// username_password(md5)
String tokenJson = new String(new BigInteger(tokenHex, 16).toByteArray());
xxlJobUser = JacksonUtil.readValue(tokenJson, XxlJobUser.class);
}
return xxlJobUser;
}
// ---------------------- login tool, with cookie and db ----------------------
public ReturnT<String> login(HttpServletRequest request, HttpServletResponse response, String username, String password, boolean ifRemember){
// param
if (username==null || username.trim().length()==0 || password==null || password.trim().length()==0){
return new ReturnT<String>(500, I18nUtil.getString("login_param_empty"));
}
// valid password
XxlJobUser xxlJobUser = xxlJobUserDao.loadByUserName(username);
if (xxlJobUser == null) {
return new ReturnT<String>(500, I18nUtil.getString("login_param_unvalid"));
}
String passwordMd5 = DigestUtils.md5DigestAsHex(password.getBytes());
if (!passwordMd5.equals(xxlJobUser.getPassword())) {
return new ReturnT<String>(500, I18nUtil.getString("login_param_unvalid"));
}
String loginToken = makeToken(xxlJobUser);
// do login
CookieUtil.set(response, LOGIN_IDENTITY_KEY, loginToken, ifRemember);
return ReturnT.SUCCESS;
}
/**
* logout
*
* @param request
* @param response
*/
public ReturnT<String> logout(HttpServletRequest request, HttpServletResponse response){
CookieUtil.remove(request, response, LOGIN_IDENTITY_KEY);
return ReturnT.SUCCESS;
}
/**
* check login status from cookie
*
* @param request
* @return
*/
public XxlJobUser ifLogin(HttpServletRequest request, HttpServletResponse response){
String cookieToken = CookieUtil.getValue(request, LOGIN_IDENTITY_KEY);
if (cookieToken != null) {
XxlJobUser cookieUser = null;
try {
cookieUser = parseToken(cookieToken);
} catch (Exception e) {
logout(request, response);
}
if (cookieUser != null) {
XxlJobUser dbUser = xxlJobUserDao.loadByUserName(cookieUser.getUsername());
if (dbUser != null) {
if (cookieUser.getPassword().equals(dbUser.getPassword())) {
return dbUser;
}
}
}
}
return null;
}
}
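The cookie token above is hex-encoded JSON rather than an encrypted value; a standalone round-trip of the same encoding (reusing JacksonUtil and BigInteger exactly as makeToken/parseToken do):

// encode: object -> JSON -> bytes -> hex string (mirrors makeToken)
String json = JacksonUtil.writeValueAsString(xxlJobUser);
String tokenHex = new BigInteger(json.getBytes()).toString(16);

// decode: hex string -> bytes -> JSON -> object (mirrors parseToken)
String decodedJson = new String(new BigInteger(tokenHex, 16).toByteArray());
XxlJobUser decoded = JacksonUtil.readValue(decodedJson, XxlJobUser.class);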

View File

@ -1,486 +0,0 @@
package com.xxl.job.admin.service.impl;
import com.xxl.job.admin.core.cron.CronExpression;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLogReport;
import com.xxl.job.admin.core.model.XxlJobUser;
import com.xxl.job.admin.core.route.ExecutorRouteStrategyEnum;
import com.xxl.job.admin.core.scheduler.MisfireStrategyEnum;
import com.xxl.job.admin.core.scheduler.ScheduleTypeEnum;
import com.xxl.job.admin.core.thread.JobScheduleHelper;
import com.xxl.job.admin.core.thread.JobTriggerPoolHelper;
import com.xxl.job.admin.core.trigger.TriggerTypeEnum;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.dao.*;
import com.xxl.job.admin.service.XxlJobService;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.enums.ExecutorBlockStrategyEnum;
import com.xxl.job.core.glue.GlueTypeEnum;
import com.xxl.job.core.util.DateUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.text.MessageFormat;
import java.util.*;
/**
* core job action for xxl-job
* @author xuxueli 2016-5-28 15:30:33
*/
@Service
public class XxlJobServiceImpl implements XxlJobService {
private static Logger logger = LoggerFactory.getLogger(XxlJobServiceImpl.class);
@Resource
private XxlJobGroupDao xxlJobGroupDao;
@Resource
private XxlJobInfoDao xxlJobInfoDao;
@Resource
public XxlJobLogDao xxlJobLogDao;
@Resource
private XxlJobLogGlueDao xxlJobLogGlueDao;
@Resource
private XxlJobLogReportDao xxlJobLogReportDao;
@Override
public Map<String, Object> pageList(int start, int length, int jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author) {
// page list
List<XxlJobInfo> list = xxlJobInfoDao.pageList(start, length, jobGroup, triggerStatus, jobDesc, executorHandler, author);
int list_count = xxlJobInfoDao.pageListCount(start, length, jobGroup, triggerStatus, jobDesc, executorHandler, author);
// package result
Map<String, Object> maps = new HashMap<String, Object>();
// total record count
maps.put("recordsTotal", list_count);
// filtered record count
maps.put("recordsFiltered", list_count);
// paged data list
maps.put("data", list);
return maps;
}
@Override
public ReturnT<String> add(XxlJobInfo jobInfo, XxlJobUser loginUser) {
// valid base
XxlJobGroup group = xxlJobGroupDao.load(jobInfo.getJobGroup());
if (group == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_choose")+I18nUtil.getString("jobinfo_field_jobgroup")) );
}
if (jobInfo.getJobDesc()==null || jobInfo.getJobDesc().trim().length()==0) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input")+I18nUtil.getString("jobinfo_field_jobdesc")) );
}
if (jobInfo.getAuthor()==null || jobInfo.getAuthor().trim().length()==0) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input")+I18nUtil.getString("jobinfo_field_author")) );
}
// valid trigger
ScheduleTypeEnum scheduleTypeEnum = ScheduleTypeEnum.match(jobInfo.getScheduleType(), null);
if (scheduleTypeEnum == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
if (scheduleTypeEnum == ScheduleTypeEnum.CRON) {
if (jobInfo.getScheduleConf()==null || !CronExpression.isValidExpression(jobInfo.getScheduleConf())) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "Cron"+I18nUtil.getString("system_unvalid"));
}
/* || scheduleTypeEnum == ScheduleTypeEnum.FIX_DELAY*/
} else if (scheduleTypeEnum == ScheduleTypeEnum.FIX_RATE) {
if (jobInfo.getScheduleConf() == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")) );
}
try {
int fixSecond = Integer.parseInt(jobInfo.getScheduleConf());
if (fixSecond < 1) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
} catch (Exception e) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
}
// valid job
if (GlueTypeEnum.match(jobInfo.getGlueType()) == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_gluetype")+I18nUtil.getString("system_unvalid")) );
}
if (GlueTypeEnum.BEAN==GlueTypeEnum.match(jobInfo.getGlueType()) && (jobInfo.getExecutorHandler()==null || jobInfo.getExecutorHandler().trim().length()==0) ) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input")+"JobHandler") );
}
// 》fix "\r" in shell
if (GlueTypeEnum.GLUE_SHELL==GlueTypeEnum.match(jobInfo.getGlueType()) && jobInfo.getGlueSource()!=null) {
jobInfo.setGlueSource(jobInfo.getGlueSource().replaceAll("\r", ""));
}
// valid advanced
if (ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null) == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorRouteStrategy")+I18nUtil.getString("system_unvalid")) );
}
if (MisfireStrategyEnum.match(jobInfo.getMisfireStrategy(), null) == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("misfire_strategy")+I18nUtil.getString("system_unvalid")) );
}
if (ExecutorBlockStrategyEnum.match(jobInfo.getExecutorBlockStrategy(), null) == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorBlockStrategy")+I18nUtil.getString("system_unvalid")) );
}
// 》ChildJobId valid
if (jobInfo.getChildJobId()!=null && jobInfo.getChildJobId().trim().length()>0) {
String[] childJobIds = jobInfo.getChildJobId().split(",");
for (String childJobIdItem: childJobIds) {
if (childJobIdItem!=null && childJobIdItem.trim().length()>0 && isNumeric(childJobIdItem)) {
XxlJobInfo childJobInfo = xxlJobInfoDao.loadById(Integer.parseInt(childJobIdItem));
if (childJobInfo==null) {
return new ReturnT<String>(ReturnT.FAIL_CODE,
MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId")+"({0})"+I18nUtil.getString("system_not_found")), childJobIdItem));
}
if (!loginUser.validPermission(childJobInfo.getJobGroup())) {
return new ReturnT<String>(ReturnT.FAIL_CODE,
MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId")+"({0})"+I18nUtil.getString("system_permission_limit")), childJobIdItem));
}
} else {
return new ReturnT<String>(ReturnT.FAIL_CODE,
MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId")+"({0})"+I18nUtil.getString("system_unvalid")), childJobIdItem));
}
}
// re-join (split/join normalizes trailing separators such as "xxx,,")
jobInfo.setChildJobId(String.join(",", childJobIds));
}
// add in db
jobInfo.setAddTime(new Date());
jobInfo.setUpdateTime(new Date());
jobInfo.setGlueUpdatetime(new Date());
xxlJobInfoDao.save(jobInfo);
if (jobInfo.getId() < 1) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_add")+I18nUtil.getString("system_fail")) );
}
return new ReturnT<String>(String.valueOf(jobInfo.getId()));
}
private boolean isNumeric(String str){
try {
Integer.parseInt(str);
return true;
} catch (NumberFormatException e) {
return false;
}
}
@Override
public ReturnT<String> update(XxlJobInfo jobInfo, XxlJobUser loginUser) {
// valid base
if (jobInfo.getJobDesc()==null || jobInfo.getJobDesc().trim().length()==0) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input")+I18nUtil.getString("jobinfo_field_jobdesc")) );
}
if (jobInfo.getAuthor()==null || jobInfo.getAuthor().trim().length()==0) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input")+I18nUtil.getString("jobinfo_field_author")) );
}
// valid trigger
ScheduleTypeEnum scheduleTypeEnum = ScheduleTypeEnum.match(jobInfo.getScheduleType(), null);
if (scheduleTypeEnum == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
if (scheduleTypeEnum == ScheduleTypeEnum.CRON) {
if (jobInfo.getScheduleConf()==null || !CronExpression.isValidExpression(jobInfo.getScheduleConf())) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "Cron"+I18nUtil.getString("system_unvalid") );
}
/*|| scheduleTypeEnum == ScheduleTypeEnum.FIX_DELAY*/
} else if (scheduleTypeEnum == ScheduleTypeEnum.FIX_RATE) {
if (jobInfo.getScheduleConf() == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
try {
int fixSecond = Integer.parseInt(jobInfo.getScheduleConf());
if (fixSecond < 1) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
} catch (Exception e) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
}
// valid advanced
if (ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null) == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorRouteStrategy")+I18nUtil.getString("system_unvalid")) );
}
if (MisfireStrategyEnum.match(jobInfo.getMisfireStrategy(), null) == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("misfire_strategy")+I18nUtil.getString("system_unvalid")) );
}
if (ExecutorBlockStrategyEnum.match(jobInfo.getExecutorBlockStrategy(), null) == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorBlockStrategy")+I18nUtil.getString("system_unvalid")) );
}
// 》ChildJobId valid
if (jobInfo.getChildJobId()!=null && jobInfo.getChildJobId().trim().length()>0) {
String[] childJobIds = jobInfo.getChildJobId().split(",");
for (String childJobIdItem: childJobIds) {
if (childJobIdItem!=null && childJobIdItem.trim().length()>0 && isNumeric(childJobIdItem)) {
XxlJobInfo childJobInfo = xxlJobInfoDao.loadById(Integer.parseInt(childJobIdItem));
if (childJobInfo==null) {
return new ReturnT<String>(ReturnT.FAIL_CODE,
MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId")+"({0})"+I18nUtil.getString("system_not_found")), childJobIdItem));
}
if (!loginUser.validPermission(childJobInfo.getJobGroup())) {
return new ReturnT<String>(ReturnT.FAIL_CODE,
MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId")+"({0})"+I18nUtil.getString("system_permission_limit")), childJobIdItem));
}
} else {
return new ReturnT<String>(ReturnT.FAIL_CODE,
MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId")+"({0})"+I18nUtil.getString("system_unvalid")), childJobIdItem));
}
}
// re-join (split/join normalizes trailing separators such as "xxx,,")
jobInfo.setChildJobId(String.join(",", childJobIds));
}
// group valid
XxlJobGroup jobGroup = xxlJobGroupDao.load(jobInfo.getJobGroup());
if (jobGroup == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_jobgroup")+I18nUtil.getString("system_unvalid")) );
}
// stage job info
XxlJobInfo exists_jobInfo = xxlJobInfoDao.loadById(jobInfo.getId());
if (exists_jobInfo == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_id")+I18nUtil.getString("system_not_found")) );
}
// next trigger time (takes effect 5s later, skipping the current pre-read window)
long nextTriggerTime = exists_jobInfo.getTriggerNextTime();
boolean scheduleDataNotChanged = jobInfo.getScheduleType().equals(exists_jobInfo.getScheduleType()) && jobInfo.getScheduleConf().equals(exists_jobInfo.getScheduleConf());
if (exists_jobInfo.getTriggerStatus() == 1 && !scheduleDataNotChanged) {
try {
Date nextValidTime = JobScheduleHelper.generateNextValidTime(jobInfo, new Date(System.currentTimeMillis() + JobScheduleHelper.PRE_READ_MS));
if (nextValidTime == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
nextTriggerTime = nextValidTime.getTime();
} catch (Exception e) {
logger.error(e.getMessage(), e);
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
}
exists_jobInfo.setJobGroup(jobInfo.getJobGroup());
exists_jobInfo.setJobDesc(jobInfo.getJobDesc());
exists_jobInfo.setAuthor(jobInfo.getAuthor());
exists_jobInfo.setAlarmEmail(jobInfo.getAlarmEmail());
exists_jobInfo.setScheduleType(jobInfo.getScheduleType());
exists_jobInfo.setScheduleConf(jobInfo.getScheduleConf());
exists_jobInfo.setMisfireStrategy(jobInfo.getMisfireStrategy());
exists_jobInfo.setExecutorRouteStrategy(jobInfo.getExecutorRouteStrategy());
exists_jobInfo.setExecutorHandler(jobInfo.getExecutorHandler());
exists_jobInfo.setExecutorParam(jobInfo.getExecutorParam());
exists_jobInfo.setExecutorBlockStrategy(jobInfo.getExecutorBlockStrategy());
exists_jobInfo.setExecutorTimeout(jobInfo.getExecutorTimeout());
exists_jobInfo.setExecutorFailRetryCount(jobInfo.getExecutorFailRetryCount());
exists_jobInfo.setChildJobId(jobInfo.getChildJobId());
exists_jobInfo.setTriggerNextTime(nextTriggerTime);
exists_jobInfo.setUpdateTime(new Date());
xxlJobInfoDao.update(exists_jobInfo);
return ReturnT.SUCCESS;
}
@Override
public ReturnT<String> remove(int id) {
XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id);
if (xxlJobInfo == null) {
return ReturnT.SUCCESS;
}
xxlJobInfoDao.delete(id);
xxlJobLogDao.delete(id);
xxlJobLogGlueDao.deleteByJobId(id);
return ReturnT.SUCCESS;
}
@Override
public ReturnT<String> start(int id) {
XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id);
// valid
ScheduleTypeEnum scheduleTypeEnum = ScheduleTypeEnum.match(xxlJobInfo.getScheduleType(), ScheduleTypeEnum.NONE);
if (ScheduleTypeEnum.NONE == scheduleTypeEnum) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type_none_limit_start")) );
}
// next trigger time (takes effect 5s later, skipping the current pre-read window)
long nextTriggerTime = 0;
try {
Date nextValidTime = JobScheduleHelper.generateNextValidTime(xxlJobInfo, new Date(System.currentTimeMillis() + JobScheduleHelper.PRE_READ_MS));
if (nextValidTime == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
nextTriggerTime = nextValidTime.getTime();
} catch (Exception e) {
logger.error(e.getMessage(), e);
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("schedule_type")+I18nUtil.getString("system_unvalid")) );
}
xxlJobInfo.setTriggerStatus(1);
xxlJobInfo.setTriggerLastTime(0);
xxlJobInfo.setTriggerNextTime(nextTriggerTime);
xxlJobInfo.setUpdateTime(new Date());
xxlJobInfoDao.update(xxlJobInfo);
return ReturnT.SUCCESS;
}
@Override
public ReturnT<String> stop(int id) {
XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id);
xxlJobInfo.setTriggerStatus(0);
xxlJobInfo.setTriggerLastTime(0);
xxlJobInfo.setTriggerNextTime(0);
xxlJobInfo.setUpdateTime(new Date());
xxlJobInfoDao.update(xxlJobInfo);
return ReturnT.SUCCESS;
}
@Override
public ReturnT<String> trigger(XxlJobUser loginUser, int jobId, String executorParam, String addressList) {
// permission
if (loginUser == null) {
return new ReturnT<String>(ReturnT.FAIL.getCode(), I18nUtil.getString("system_permission_limit"));
}
XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(jobId);
if (xxlJobInfo == null) {
return new ReturnT<String>(ReturnT.FAIL.getCode(), I18nUtil.getString("jobinfo_glue_jobid_unvalid"));
}
if (!hasPermission(loginUser, xxlJobInfo.getJobGroup())) {
return new ReturnT<String>(ReturnT.FAIL.getCode(), I18nUtil.getString("system_permission_limit"));
}
// force cover job param
if (executorParam == null) {
executorParam = "";
}
JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.MANUAL, -1, null, executorParam, addressList);
return ReturnT.SUCCESS;
}
private boolean hasPermission(XxlJobUser loginUser, int jobGroup){
if (loginUser.getRole() == 1) {
return true;
}
List<String> groupIdStrs = new ArrayList<>();
if (loginUser.getPermission()!=null && loginUser.getPermission().trim().length()>0) {
groupIdStrs = Arrays.asList(loginUser.getPermission().trim().split(","));
}
return groupIdStrs.contains(String.valueOf(jobGroup));
}
@Override
public Map<String, Object> dashboardInfo() {
int jobInfoCount = xxlJobInfoDao.findAllCount();
int jobLogCount = 0;
int jobLogSuccessCount = 0;
XxlJobLogReport xxlJobLogReport = xxlJobLogReportDao.queryLogReportTotal();
if (xxlJobLogReport != null) {
jobLogCount = xxlJobLogReport.getRunningCount() + xxlJobLogReport.getSucCount() + xxlJobLogReport.getFailCount();
jobLogSuccessCount = xxlJobLogReport.getSucCount();
}
// executor count
Set<String> executorAddressSet = new HashSet<String>();
List<XxlJobGroup> groupList = xxlJobGroupDao.findAll();
if (groupList!=null && !groupList.isEmpty()) {
for (XxlJobGroup group: groupList) {
if (group.getRegistryList()!=null && !group.getRegistryList().isEmpty()) {
executorAddressSet.addAll(group.getRegistryList());
}
}
}
int executorCount = executorAddressSet.size();
Map<String, Object> dashboardMap = new HashMap<String, Object>();
dashboardMap.put("jobInfoCount", jobInfoCount);
dashboardMap.put("jobLogCount", jobLogCount);
dashboardMap.put("jobLogSuccessCount", jobLogSuccessCount);
dashboardMap.put("executorCount", executorCount);
return dashboardMap;
}
@Override
public ReturnT<Map<String, Object>> chartInfo(Date startDate, Date endDate) {
// process
List<String> triggerDayList = new ArrayList<String>();
List<Integer> triggerDayCountRunningList = new ArrayList<Integer>();
List<Integer> triggerDayCountSucList = new ArrayList<Integer>();
List<Integer> triggerDayCountFailList = new ArrayList<Integer>();
int triggerCountRunningTotal = 0;
int triggerCountSucTotal = 0;
int triggerCountFailTotal = 0;
List<XxlJobLogReport> logReportList = xxlJobLogReportDao.queryLogReport(startDate, endDate);
if (logReportList!=null && logReportList.size()>0) {
for (XxlJobLogReport item: logReportList) {
String day = DateUtil.formatDate(item.getTriggerDay());
int triggerDayCountRunning = item.getRunningCount();
int triggerDayCountSuc = item.getSucCount();
int triggerDayCountFail = item.getFailCount();
triggerDayList.add(day);
triggerDayCountRunningList.add(triggerDayCountRunning);
triggerDayCountSucList.add(triggerDayCountSuc);
triggerDayCountFailList.add(triggerDayCountFail);
triggerCountRunningTotal += triggerDayCountRunning;
triggerCountSucTotal += triggerDayCountSuc;
triggerCountFailTotal += triggerDayCountFail;
}
} else {
for (int i = -6; i <= 0; i++) {
triggerDayList.add(DateUtil.formatDate(DateUtil.addDays(new Date(), i)));
triggerDayCountRunningList.add(0);
triggerDayCountSucList.add(0);
triggerDayCountFailList.add(0);
}
}
Map<String, Object> result = new HashMap<String, Object>();
result.put("triggerDayList", triggerDayList);
result.put("triggerDayCountRunningList", triggerDayCountRunningList);
result.put("triggerDayCountSucList", triggerDayCountSucList);
result.put("triggerDayCountFailList", triggerDayCountFailList);
result.put("triggerCountRunningTotal", triggerCountRunningTotal);
result.put("triggerCountSucTotal", triggerCountSucTotal);
result.put("triggerCountFailTotal", triggerCountFailTotal);
return new ReturnT<Map<String, Object>>(result);
}
}
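add, update and start all derive the first trigger time the same way: the search starts five seconds in the future so the change skips the scheduler's current pre-read cycle. A condensed sketch of that shared step (names taken directly from the code above):

// compute the next fire time starting PRE_READ_MS (5s) from now, so the running pre-read cycle is not affected
Date from = new Date(System.currentTimeMillis() + JobScheduleHelper.PRE_READ_MS);
try {
    Date nextValidTime = JobScheduleHelper.generateNextValidTime(jobInfo, from);
    if (nextValidTime != null) {
        jobInfo.setTriggerNextTime(nextValidTime.getTime());
    }
} catch (Exception e) {
    logger.error(e.getMessage(), e);
}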

View File

@ -1,66 +0,0 @@
### web
server.port=8969
server.servlet.context-path=/xxl-job-admin
### actuator
management.server.base-path=/actuator
management.health.mail.enabled=false
### resources
spring.mvc.servlet.load-on-startup=0
spring.mvc.static-path-pattern=/static/**
spring.web.resources.static-locations=classpath:/static/
### freemarker
spring.freemarker.templateLoaderPath=classpath:/templates/
spring.freemarker.suffix=.ftl
spring.freemarker.charset=UTF-8
spring.freemarker.request-context-attribute=request
spring.freemarker.settings.number_format=0.##########
spring.freemarker.settings.new_builtin_class_resolver=safer
### mybatis
mybatis.mapper-locations=classpath:/mybatis-mapper/*Mapper.xml
#mybatis.type-aliases-package=com.xxl.job.admin.core.model
### xxl-job, datasource
spring.datasource.url=jdbc:mysql://mysql.cloud.muyu.icu:13036/cloud-xxl-job?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&serverTimezone=Asia/Shanghai
spring.datasource.username=root
spring.datasource.password=MuYu_Cloud@131
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
### datasource-pool
spring.datasource.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.hikari.minimum-idle=10
spring.datasource.hikari.maximum-pool-size=30
spring.datasource.hikari.auto-commit=true
spring.datasource.hikari.idle-timeout=30000
spring.datasource.hikari.pool-name=HikariCP
spring.datasource.hikari.max-lifetime=900000
spring.datasource.hikari.connection-timeout=10000
spring.datasource.hikari.connection-test-query=SELECT 1
spring.datasource.hikari.validation-timeout=1000
### xxl-job, email
spring.mail.host=smtp.163.com
spring.mail.port=465
spring.mail.from=dongzl828@163.com
spring.mail.username=dongzl828@163.com
spring.mail.password=EGHEDRZUIZWUIGZY
spring.mail.properties.mail.smtp.auth=true
spring.mail.properties.mail.smtp.starttls.enable=true
spring.mail.properties.mail.smtp.starttls.required=true
spring.mail.properties.mail.smtp.socketFactory.class=javax.net.ssl.SSLSocketFactory
### xxl-job, access token
xxl.job.accessToken=Cloud-MuYu-Xxl-Job-Admin
### xxl-job, i18n (default is zh_CN, and you can choose "zh_CN", "zh_TC" and "en")
xxl.job.i18n=zh_CN
## xxl-job, triggerpool max size
xxl.job.triggerpool.fast.max=200
xxl.job.triggerpool.slow.max=100
### xxl-job, log retention days
xxl.job.logretentiondays=30

View File

@ -1,277 +0,0 @@
admin_name=Scheduling Center
admin_name_full=Distributed Task Scheduling Platform XXL-JOB
admin_version=2.5.0-SNAPSHOT
admin_i18n=en
## system
system_tips=System message
system_ok=Confirm
system_close=Close
system_save=Save
system_cancel=Cancel
system_search=Search
system_status=Status
system_opt=Operate
system_please_input=please input
system_please_choose=please choose
system_success=success
system_fail=fail
system_add_suc=add success
system_add_fail=add fail
system_update_suc=update success
system_update_fail=update fail
system_all=All
system_api_error=net error
system_show=Show
system_empty=Empty
system_opt_suc=operate success
system_opt_fail=operate fail
system_opt_edit=Edit
system_opt_del=Delete
system_opt_copy=Copy
system_unvalid=illegal
system_not_found=not exist
system_nav=Navigation
system_digits=digits
system_lengh_limit=Length limit
system_permission_limit=Permission limit
system_welcome=Welcome
## daterangepicker
daterangepicker_ranges_recent_hour=recent one hour
daterangepicker_ranges_today=today
daterangepicker_ranges_yesterday=yesterday
daterangepicker_ranges_this_month=this month
daterangepicker_ranges_last_month=last month
daterangepicker_ranges_recent_week=recent one week
daterangepicker_ranges_recent_month=recent one month
daterangepicker_custom_name=custom
daterangepicker_custom_starttime=start time
daterangepicker_custom_endtime=end time
daterangepicker_custom_daysofweek=Sun,Mon,Tue,Wed,Thu,Fri,Sat
daterangepicker_custom_monthnames=Jan,Feb,Mar,Apr,May,Jun,Jul,Aug,Sep,Oct,Nov,Dec
## dataTable
dataTable_sProcessing=processing...
dataTable_sLengthMenu= _MENU_ records per page
dataTable_sZeroRecords=No matching results
dataTable_sInfo=page _PAGE_ ( Total _PAGES_ pages, _TOTAL_ records )
dataTable_sInfoEmpty=No Record
dataTable_sInfoFiltered=(Filtered by _MAX_ results)
dataTable_sSearch=Search
dataTable_sEmptyTable=Table data is empty
dataTable_sLoadingRecords=Loading...
dataTable_sFirst=FIRST PAGE
dataTable_sPrevious=Previous Page
dataTable_sNext=Next Page
dataTable_sLast=LAST PAGE
dataTable_sSortAscending=: Rank this column in ascending order
dataTable_sSortDescending=: Rank this column in descending order
## login
login_btn=Login
login_remember_me=Remember Me
login_username_placeholder=Please enter username
login_password_placeholder=Please enter password
login_username_empty=Please enter username
login_username_lt_4=Username length should not be less than 4
login_password_empty=Please enter password
login_password_lt_4=Password length should not be less than 4
login_success=Login success
login_fail=Login fail
login_param_empty=Username or password is empty
login_param_unvalid=Username or password error
## logout
logout_btn=Logout
logout_confirm=Confirm logout?
logout_success=Logout success
logout_fail=Logout fail
## change pwd
change_pwd=Change password
change_pwd_suc_to_logout=Password changed successfully; you will be logged out shortly
change_pwd_field_oldpwd=old password
change_pwd_field_newpwd=new password
## dashboard
job_dashboard_name=Run report
job_dashboard_job_num=Job number
job_dashboard_job_num_tip=The number of tasks running in the scheduling center
job_dashboard_trigger_num=trigger number
job_dashboard_trigger_num_tip=The number of trigger record scheduled by the scheduling center
job_dashboard_jobgroup_num=Executor number
job_dashboard_jobgroup_num_tip=The number of online executor machines perceived by the scheduling center
job_dashboard_report=Scheduling report
job_dashboard_report_loaddata_fail=Scheduling report load data error
job_dashboard_date_report=Date distribution
job_dashboard_rate_report=Percentage distribution
## job info
jobinfo_name=Job Manage
jobinfo_job=Job
jobinfo_field_add=Add Job
jobinfo_field_update=Edit Job
jobinfo_field_id=Job ID
jobinfo_field_jobgroup=Executor
jobinfo_field_jobdesc=Job description
jobinfo_field_timeout=Job timeout period
jobinfo_field_gluetype=GLUE Type
jobinfo_field_executorparam=Param
jobinfo_field_author=Author
jobinfo_field_alarmemail=Alarm email
jobinfo_field_alarmemail_placeholder=Please enter alarm mail, if there are more than one comma separated
jobinfo_field_executorRouteStrategy=Route Strategy
jobinfo_field_childJobId=Child Job ID
jobinfo_field_childJobId_placeholder=Please enter the Child job ID, if there are more than one comma separated
jobinfo_field_executorBlockStrategy=Block Strategy
jobinfo_field_executorFailRetryCount=Fail Retry Count
jobinfo_field_executorFailRetryCount_placeholder=Fail retry count. Takes effect if greater than zero
jobinfo_script_location=Script location
jobinfo_shard_index=Shard index
jobinfo_shard_total=Shard total
jobinfo_opt_stop=Stop
jobinfo_opt_start=Start
jobinfo_opt_log=Query Log
jobinfo_opt_run=Run Once
jobinfo_opt_run_tips=Please input the machine address for this run; if empty, it is obtained from the executor registry
jobinfo_opt_registryinfo=Registry Info
jobinfo_opt_next_time=Next trigger time
jobinfo_glue_remark=Resource Remark
jobinfo_glue_remark_limit=Resource Remark length is limited to 4~100
jobinfo_glue_rollback=Version Backtrack
jobinfo_glue_jobid_unvalid=Job ID is illegal
jobinfo_glue_gluetype_unvalid=The job is not GLUE Type
jobinfo_field_executorTimeout_placeholder=Job timeout period in seconds. Takes effect if greater than zero
schedule_type=Schedule Type
schedule_type_none=None
schedule_type_cron=Cron
schedule_type_fix_rate=Fix rate
schedule_type_fix_delay=Fix delay
schedule_type_none_limit_start=The current schedule type disables startup
misfire_strategy=Misfire strategy
misfire_strategy_do_nothing=Do nothing
misfire_strategy_fire_once_now=Fire once now
jobinfo_conf_base=Base configuration
jobinfo_conf_schedule=Schedule configuration
jobinfo_conf_job=Job configuration
jobinfo_conf_advanced=Advanced configuration
## job log
joblog_name=Trigger Log
joblog_status=Status
joblog_status_all=All
joblog_status_suc=Success
joblog_status_fail=Fail
joblog_status_running=Running
joblog_field_triggerTime=Trigger Time
joblog_field_triggerCode=Trigger Result
joblog_field_triggerMsg=Trigger Msg
joblog_field_handleTime=Handle Time
joblog_field_handleCode=Handle Result
joblog_field_handleMsg=Handle Msg
joblog_field_executorAddress=Executor Address
joblog_clean=Clean
joblog_clean_log=Clean Log
joblog_clean_type=Clean Type
joblog_clean_type_1=Clean up log data older than one month
joblog_clean_type_2=Clean up log data older than three months
joblog_clean_type_3=Clean up log data older than six months
joblog_clean_type_4=Clean up log data older than one year
joblog_clean_type_5=Clean up log data beyond the most recent 1,000 records
joblog_clean_type_6=Clean up log data beyond the most recent 10,000 records
joblog_clean_type_7=Clean up log data beyond the most recent 30,000 records
joblog_clean_type_8=Clean up log data beyond the most recent 100,000 records
joblog_clean_type_9=Clean up all log data
joblog_clean_type_unvalid=Clean type is illegal
joblog_handleCode_200=Success
joblog_handleCode_500=Fail
joblog_handleCode_502=Timeout
joblog_kill_log=Kill Job
joblog_kill_log_limit=Trigger Fail, can not kill job
joblog_kill_log_byman=Manual operation, kill job
joblog_lost_fail=Job result lost, marked as failure
joblog_rolling_log=Rolling log
joblog_rolling_log_refresh=Refresh
joblog_rolling_log_triggerfail=The job trigger failed, so the rolling log cannot be viewed
joblog_rolling_log_failoften=The rolling-log request was terminated because the number of failed requests exceeded the limit; refresh the page to reload the log
joblog_logid_unvalid=Log ID is illegal
## job group
jobgroup_name=Executor Manage
jobgroup_list=Executor List
jobgroup_add=Add Executor
jobgroup_edit=Edit Executor
jobgroup_del=Delete Executor
jobgroup_field_title=Title
jobgroup_field_addressType=Registry Type
jobgroup_field_addressType_0=Automatic registration
jobgroup_field_addressType_1=Manual registration
jobgroup_field_addressType_limit=With manual registration, the machine address must not be empty
jobgroup_field_registryList=machine address
jobgroup_field_registryList_unvalid=registry machine address is illegal
jobgroup_field_registryList_placeholder=Please enter the machine address, if there are more than one comma separated
jobgroup_field_appname_limit=Must start with a lowercase letter and consist of lowercase letters, numbers and hyphens.
jobgroup_field_appname_length=AppName length is limited to 4~64
jobgroup_field_title_length=Title length is limited to 4~12
jobgroup_field_order_digits=Please enter a positive integer
jobgroup_field_orderrange=Order is limited to 1~1000
jobgroup_del_limit_0=Refuse to delete, the executor is being used
jobgroup_del_limit_1=Refuses to delete, the system retains at least one executor
jobgroup_empty=There is no valid executor. Please contact the administrator
## job conf
jobconf_block_SERIAL_EXECUTION=Serial execution
jobconf_block_DISCARD_LATER=Discard Later
jobconf_block_COVER_EARLY=Cover Early
jobconf_route_first=First
jobconf_route_last=Last
jobconf_route_round=Round
jobconf_route_random=Random
jobconf_route_consistenthash=Consistent Hash
jobconf_route_lfu=Least Frequently Used
jobconf_route_lru=Least Recently Used
jobconf_route_failover=Failover
jobconf_route_busyover=Busyover
jobconf_route_shard=Sharding Broadcast
jobconf_idleBeat=Idle check
jobconf_beat=Heartbeats
jobconf_monitor=Task Scheduling Center monitor alarm
jobconf_monitor_detail=monitor alarm details
jobconf_monitor_alarm_title=Alarm Type
jobconf_monitor_alarm_type=Trigger Fail
jobconf_monitor_alarm_content=Alarm Content
jobconf_trigger_admin_adress=Trigger machine address
jobconf_trigger_exe_regtype=Executor-Registry Type
jobconf_trigger_exe_regaddress=Executor-Registry Address
jobconf_trigger_address_empty=Trigger Fail: registry address is empty
jobconf_trigger_run=Trigger Job
jobconf_trigger_child_run=Trigger child job
jobconf_callback_child_msg1={0}/{1} [Job ID={2}], Trigger {3}, Trigger msg: {4} <br>
jobconf_callback_child_msg2={0}/{1} [Job ID={2}], Trigger Fail, Trigger msg: Job ID is illegal <br>
jobconf_trigger_type=Job trigger type
jobconf_trigger_type_cron=Cron trigger
jobconf_trigger_type_manual=Manual trigger
jobconf_trigger_type_parent=Parent job trigger
jobconf_trigger_type_api=Api trigger
jobconf_trigger_type_retry=Fail retry trigger
jobconf_trigger_type_misfire=Misfire compensation trigger
## user
user_manage=User Manage
user_username=Username
user_password=Password
user_role=Role
user_role_admin=Admin User
user_role_normal=Normal User
user_permission=Permission
user_add=Add User
user_update=Edit User
user_username_repeat=Username Repeat
user_username_valid=Restrictions start with a lowercase letter and consist of lowercase letters and Numbers
user_password_update_placeholder=Please input password, empty means not update
user_update_loginuser_limit=Operation of current login account is not allowed
## help
job_help=Tutorial
job_help_document=Official Document
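These keys are resolved at runtime through I18nUtil, the same helper used throughout the admin code above; for example (sketch):

// look up a localized message by key; the locale comes from the xxl.job.i18n property
String msg = I18nUtil.getString("login_param_empty");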

View File

@ -1,277 +0,0 @@
admin_name=任务调度中心
admin_name_full=分布式任务调度平台XXL-JOB
admin_version=2.5.0-SNAPSHOT
admin_i18n=
## system
system_tips=系统提示
system_ok=确定
system_close=关闭
system_save=保存
system_cancel=取消
system_search=搜索
system_status=状态
system_opt=操作
system_please_input=请输入
system_please_choose=请选择
system_success=成功
system_fail=失败
system_add_suc=新增成功
system_add_fail=新增失败
system_update_suc=更新成功
system_update_fail=更新失败
system_all=全部
system_api_error=接口异常
system_show=查看
system_empty=
system_opt_suc=操作成功
system_opt_fail=操作失败
system_opt_edit=编辑
system_opt_del=删除
system_opt_copy=复制
system_unvalid=非法
system_not_found=不存在
system_nav=导航
system_digits=整数
system_lengh_limit=长度限制
system_permission_limit=权限拦截
system_welcome=欢迎
## daterangepicker
daterangepicker_ranges_recent_hour=最近一小时
daterangepicker_ranges_today=今日
daterangepicker_ranges_yesterday=昨日
daterangepicker_ranges_this_month=本月
daterangepicker_ranges_last_month=上个月
daterangepicker_ranges_recent_week=最近一周
daterangepicker_ranges_recent_month=最近一月
daterangepicker_custom_name=自定义
daterangepicker_custom_starttime=起始时间
daterangepicker_custom_endtime=结束时间
daterangepicker_custom_daysofweek=日,一,二,三,四,五,六
daterangepicker_custom_monthnames=一月,二月,三月,四月,五月,六月,七月,八月,九月,十月,十一月,十二月
## dataTable
dataTable_sProcessing=处理中...
dataTable_sLengthMenu=每页 _MENU_ 条记录
dataTable_sZeroRecords=没有匹配结果
dataTable_sInfo=第 _PAGE_ 页 ( 总共 _PAGES_ 页_TOTAL_ 条记录 )
dataTable_sInfoEmpty=无记录
dataTable_sInfoFiltered=(由 _MAX_ 项结果过滤)
dataTable_sSearch=搜索
dataTable_sEmptyTable=表中数据为空
dataTable_sLoadingRecords=载入中...
dataTable_sFirst=首页
dataTable_sPrevious=上页
dataTable_sNext=下页
dataTable_sLast=末页
dataTable_sSortAscending=: 以升序排列此列
dataTable_sSortDescending=: 以降序排列此列
## login
login_btn=登录
login_remember_me=记住密码
login_username_placeholder=请输入登录账号
login_password_placeholder=请输入登录密码
login_username_empty=请输入登录账号
login_username_lt_4=登录账号不应低于4位
login_password_empty=请输入登录密码
login_password_lt_4=登录密码不应低于4位
login_success=登录成功
login_fail=登录失败
login_param_empty=账号或密码为空
login_param_unvalid=账号或密码错误
## logout
logout_btn=注销
logout_confirm=确认注销登录?
logout_success=注销成功
logout_fail=注销失败
## change pwd
change_pwd=修改密码
change_pwd_suc_to_logout=修改密码成功,即将注销登陆
change_pwd_field_oldpwd=旧密码
change_pwd_field_newpwd=新密码
## dashboard
job_dashboard_name=运行报表
job_dashboard_job_num=任务数量
job_dashboard_job_num_tip=调度中心运行的任务数量
job_dashboard_trigger_num=调度次数
job_dashboard_trigger_num_tip=调度中心触发的调度次数
job_dashboard_jobgroup_num=执行器数量
job_dashboard_jobgroup_num_tip=调度中心在线的执行器机器数量
job_dashboard_report=调度报表
job_dashboard_report_loaddata_fail=调度报表数据加载异常
job_dashboard_date_report=日期分布图
job_dashboard_rate_report=成功比例图
## job info
jobinfo_name=任务管理
jobinfo_job=任务
jobinfo_field_add=新增
jobinfo_field_update=更新任务
jobinfo_field_id=任务ID
jobinfo_field_jobgroup=执行器
jobinfo_field_jobdesc=任务描述
jobinfo_field_gluetype=运行模式
jobinfo_field_executorparam=任务参数
jobinfo_field_author=负责人
jobinfo_field_timeout=任务超时时间
jobinfo_field_alarmemail=报警邮件
jobinfo_field_alarmemail_placeholder=请输入报警邮件,多个邮件地址则逗号分隔
jobinfo_field_executorRouteStrategy=路由策略
jobinfo_field_childJobId=子任务ID
jobinfo_field_childJobId_placeholder=请输入子任务的任务ID,如存在多个则逗号分隔
jobinfo_field_executorBlockStrategy=阻塞处理策略
jobinfo_field_executorFailRetryCount=失败重试次数
jobinfo_field_executorFailRetryCount_placeholder=失败重试次数,大于零时生效
jobinfo_script_location=脚本位置
jobinfo_shard_index=分片序号
jobinfo_shard_total=分片总数
jobinfo_opt_stop=停止
jobinfo_opt_start=启动
jobinfo_opt_log=查询日志
jobinfo_opt_run=执行一次
jobinfo_opt_run_tips=请输入本次执行的机器地址,为空则从执行器获取
jobinfo_opt_registryinfo=注册节点
jobinfo_opt_next_time=下次执行时间
jobinfo_glue_remark=源码备注
jobinfo_glue_remark_limit=源码备注长度限制为4~100
jobinfo_glue_rollback=版本回溯
jobinfo_glue_jobid_unvalid=任务ID非法
jobinfo_glue_gluetype_unvalid=该任务非GLUE模式
jobinfo_field_executorTimeout_placeholder=任务超时时间,单位秒,大于零时生效
schedule_type=调度类型
schedule_type_none=
schedule_type_cron=CRON
schedule_type_fix_rate=固定速度
schedule_type_fix_delay=固定延迟
schedule_type_none_limit_start=当前调度类型禁止启动
misfire_strategy=调度过期策略
misfire_strategy_do_nothing=忽略
misfire_strategy_fire_once_now=立即执行一次
jobinfo_conf_base=基础配置
jobinfo_conf_schedule=调度配置
jobinfo_conf_job=任务配置
jobinfo_conf_advanced=高级配置
## job log
joblog_name=调度日志
joblog_status=状态
joblog_status_all=全部
joblog_status_suc=成功
joblog_status_fail=失败
joblog_status_running=进行中
joblog_field_triggerTime=调度时间
joblog_field_triggerCode=调度结果
joblog_field_triggerMsg=调度备注
joblog_field_handleTime=执行时间
joblog_field_handleCode=执行结果
joblog_field_handleMsg=执行备注
joblog_field_executorAddress=执行器地址
joblog_clean=清理
joblog_clean_log=日志清理
joblog_clean_type=清理方式
joblog_clean_type_1=清理一个月之前日志数据
joblog_clean_type_2=清理三个月之前日志数据
joblog_clean_type_3=清理六个月之前日志数据
joblog_clean_type_4=清理一年之前日志数据
joblog_clean_type_5=清理一千条以前日志数据
joblog_clean_type_6=清理一万条以前日志数据
joblog_clean_type_7=清理三万条以前日志数据
joblog_clean_type_8=清理十万条以前日志数据
joblog_clean_type_9=清理所有日志数据
joblog_clean_type_unvalid=清理类型参数异常
joblog_handleCode_200=成功
joblog_handleCode_500=失败
joblog_handleCode_502=失败(超时)
joblog_kill_log=终止任务
joblog_kill_log_limit=调度失败,无法终止日志
joblog_kill_log_byman=人为操作,主动终止
joblog_lost_fail=任务结果丢失,标记失败
joblog_rolling_log=执行日志
joblog_rolling_log_refresh=刷新
joblog_rolling_log_triggerfail=任务发起调度失败,无法查看执行日志
joblog_rolling_log_failoften=终止请求Rolling日志,请求失败次数超上限,可刷新页面重新加载日志
joblog_logid_unvalid=日志ID非法
## job group
jobgroup_name=执行器管理
jobgroup_list=执行器列表
jobgroup_add=新增执行器
jobgroup_edit=编辑执行器
jobgroup_del=删除执行器
jobgroup_field_title=名称
jobgroup_field_addressType=注册方式
jobgroup_field_addressType_0=自动注册
jobgroup_field_addressType_1=手动录入
jobgroup_field_addressType_limit=手动录入注册方式,机器地址不可为空
jobgroup_field_registryList=机器地址
jobgroup_field_registryList_unvalid=机器地址格式非法
jobgroup_field_registryList_placeholder=请输入执行器地址列表,多地址逗号分隔
jobgroup_field_appname_limit=限制以小写字母开头,由小写字母、数字和中划线组成
jobgroup_field_appname_length=AppName长度限制为4~64
jobgroup_field_title_length=名称长度限制为4~12
jobgroup_field_order_digits=请输入整数
jobgroup_field_orderrange=取值范围为1~1000
jobgroup_del_limit_0=拒绝删除,该执行器使用中
jobgroup_del_limit_1=拒绝删除, 系统至少保留一个执行器
jobgroup_empty=不存在有效执行器,请联系管理员
## job conf
jobconf_block_SERIAL_EXECUTION=单机串行
jobconf_block_DISCARD_LATER=丢弃后续调度
jobconf_block_COVER_EARLY=覆盖之前调度
jobconf_route_first=第一个
jobconf_route_last=最后一个
jobconf_route_round=轮询
jobconf_route_random=随机
jobconf_route_consistenthash=一致性HASH
jobconf_route_lfu=最不经常使用
jobconf_route_lru=最近最久未使用
jobconf_route_failover=故障转移
jobconf_route_busyover=忙碌转移
jobconf_route_shard=分片广播
jobconf_idleBeat=空闲检测
jobconf_beat=心跳检测
jobconf_monitor=任务调度中心监控报警
jobconf_monitor_detail=监控告警明细
jobconf_monitor_alarm_title=告警类型
jobconf_monitor_alarm_type=调度失败
jobconf_monitor_alarm_content=告警内容
jobconf_trigger_admin_adress=调度机器
jobconf_trigger_exe_regtype=执行器-注册方式
jobconf_trigger_exe_regaddress=执行器-地址列表
jobconf_trigger_address_empty=调度失败:执行器地址为空
jobconf_trigger_run=触发调度
jobconf_trigger_child_run=触发子任务
jobconf_callback_child_msg1={0}/{1} [任务ID={2}], 触发{3}, 触发备注: {4} <br>
jobconf_callback_child_msg2={0}/{1} [任务ID={2}], 触发失败, 触发备注: 任务ID格式错误 <br>
jobconf_trigger_type=任务触发类型
jobconf_trigger_type_cron=Cron触发
jobconf_trigger_type_manual=手动触发
jobconf_trigger_type_parent=父任务触发
jobconf_trigger_type_api=API触发
jobconf_trigger_type_retry=失败重试触发
jobconf_trigger_type_misfire=调度过期补偿
## user
user_manage=用户管理
user_username=账号
user_password=密码
user_role=角色
user_role_admin=管理员
user_role_normal=普通用户
user_permission=权限
user_add=新增用户
user_update=更新用户
user_username_repeat=账号重复
user_username_valid=限制以小写字母开头,由小写字母、数字组成
user_password_update_placeholder=请输入新密码,为空则不更新密码
user_update_loginuser_limit=禁止操作当前登录账号
## help
job_help=使用教程
job_help_document=官方文档

View File

@ -1,277 +0,0 @@
admin_name=任務調度中心
admin_name_full=分布式任務調度平臺XXL-JOB
admin_version=2.5.0-SNAPSHOT
admin_i18n=
## system
system_tips=系統提示
system_ok=確定
system_close=關閉
system_save=儲存
system_cancel=取消
system_search=搜尋
system_status=狀態
system_opt=操作
system_please_input=請輸入
system_please_choose=请選擇
system_success=成功
system_fail=失敗
system_add_suc=新增成功
system_add_fail=新增失敗
system_update_suc=更新成功
system_update_fail=更新失敗
system_all=全部
system_api_error=API錯誤
system_show=查看
system_empty=
system_opt_suc=操作成功
system_opt_fail=操作失敗
system_opt_edit=編輯
system_opt_del=刪除
system_opt_copy=復制
system_unvalid=非法
system_not_found=不存在
system_nav=導航
system_digits=整數
system_lengh_limit=長度限制
system_permission_limit=權限控管
system_welcome=歡迎
## daterangepicker
daterangepicker_ranges_recent_hour=最近一小時
daterangepicker_ranges_today=今日
daterangepicker_ranges_yesterday=昨日
daterangepicker_ranges_this_month=本月
daterangepicker_ranges_last_month=上個月
daterangepicker_ranges_recent_week=最近一周
daterangepicker_ranges_recent_month=最近一月
daterangepicker_custom_name=自定義
daterangepicker_custom_starttime=起始時間
daterangepicker_custom_endtime=結束時間
daterangepicker_custom_daysofweek=日,一,二,三,四,五,六
daterangepicker_custom_monthnames=一月,二月,三月,四月,五月,六月,七月,八月,九月,十月,十一月,十二月
## dataTable
dataTable_sProcessing=處理中...
dataTable_sLengthMenu=每頁 _MENU_ 條記錄
dataTable_sZeroRecords=沒有相符合記錄
dataTable_sInfo=第 _PAGE_ 頁 ( 總共 _PAGES_ 頁_TOTAL_ 條記錄 )
dataTable_sInfoEmpty=無記錄
dataTable_sInfoFiltered=(由 _MAX_ 項結果過濾)
dataTable_sSearch=搜尋
dataTable_sEmptyTable=表中資料為空
dataTable_sLoadingRecords=載入中...
dataTable_sFirst=首頁
dataTable_sPrevious=上頁
dataTable_sNext=下頁
dataTable_sLast=末頁
dataTable_sSortAscending=: 以升幂排序此列
dataTable_sSortDescending=: 以降幂排序此列
## login
login_btn=登入
login_remember_me=記住密碼
login_username_placeholder=請輸入登入帳號
login_password_placeholder=請輸入登入密碼
login_username_empty=請輸入登入帳號
login_username_lt_4=登入帳號不應低於4位數
login_password_empty=請輸入登入密碼
login_password_lt_4=登入密碼不應低於4位數
login_success=登入成功
login_fail=登入失敗
login_param_empty=帳號或密碼為空值
login_param_unvalid=帳號或密碼錯誤
## logout
logout_btn=登出
logout_confirm=確認登出?
logout_success=登出成功
logout_fail=登出失敗
## change pwd
change_pwd=修改密碼
change_pwd_suc_to_logout=修改密碼成功,即將登出
change_pwd_field_oldpwd=舊密碼
change_pwd_field_newpwd=新密碼
## dashboard
job_dashboard_name=運行報表
job_dashboard_job_num=任務數量
job_dashboard_job_num_tip=調度中心運行的任務數量
job_dashboard_trigger_num=調度次數
job_dashboard_trigger_num_tip=調度中心觸發的調度次數
job_dashboard_jobgroup_num=執行器數量
job_dashboard_jobgroup_num_tip=調度中心在線的執行器機器數量
job_dashboard_report=調度報表
job_dashboard_report_loaddata_fail=調度報表資料加載異常
job_dashboard_date_report=日期分布圖
job_dashboard_rate_report=成功比例圖
## job info
jobinfo_name=任務管理
jobinfo_job=任務
jobinfo_field_add=新增
jobinfo_field_update=更新任務
jobinfo_field_id=任務ID
jobinfo_field_jobgroup=執行器
jobinfo_field_jobdesc=任務描述
jobinfo_field_gluetype=運行模式
jobinfo_field_executorparam=任務參數
jobinfo_field_author=負責人
jobinfo_field_timeout=任務超時秒數
jobinfo_field_alarmemail=告警郵件
jobinfo_field_alarmemail_placeholder=輸入多個告警郵件地址,請以逗號分隔
jobinfo_field_executorRouteStrategy=路由策略
jobinfo_field_childJobId=子任務ID
jobinfo_field_childJobId_placeholder=輸入子任務ID如有多個請以逗號分隔
jobinfo_field_executorBlockStrategy=阻塞處理策略
jobinfo_field_executorFailRetryCount=失敗重試次數
jobinfo_field_executorFailRetryCount_placeholder=失敗重試次數,大於零時生效
jobinfo_script_location=腳本位置
jobinfo_shard_index=分片序號
jobinfo_shard_total=分片總數
jobinfo_opt_stop=停止
jobinfo_opt_start=啟動
jobinfo_opt_log=查詢日誌
jobinfo_opt_run=執行一次
jobinfo_opt_run_tips=請輸入本次執行的機器地址,為空則從執行器獲取
jobinfo_opt_registryinfo=注冊節點
jobinfo_opt_next_time=下次執行時間
jobinfo_glue_remark=源碼備註
jobinfo_glue_remark_limit=源碼備註長度限制為4~100
jobinfo_glue_rollback=版本回復
jobinfo_glue_jobid_unvalid=任務ID非法
jobinfo_glue_gluetype_unvalid=該任務非GLUE模式
jobinfo_field_executorTimeout_placeholder=任務超時時間,單位秒,大於零時生效
schedule_type=調度類型
schedule_type_none=
schedule_type_cron=CRON
schedule_type_fix_rate=固定速度
schedule_type_fix_delay=固定延遲
schedule_type_none_limit_start=當前調度類型禁止啟動
misfire_strategy=調度過期策略
misfire_strategy_do_nothing=忽略
misfire_strategy_fire_once_now=立即執行壹次
jobinfo_conf_base=基礎配置
jobinfo_conf_schedule=調度配置
jobinfo_conf_job=任務配置
jobinfo_conf_advanced=高級配置
## job log
joblog_name=調度日誌
joblog_status=狀態
joblog_status_all=全部
joblog_status_suc=成功
joblog_status_fail=失敗
joblog_status_running=進行中
joblog_field_triggerTime=調度時間
joblog_field_triggerCode=調度結果
joblog_field_triggerMsg=調度備註
joblog_field_handleTime=執行時間
joblog_field_handleCode=執行结果
joblog_field_handleMsg=執行備註
joblog_field_executorAddress=執行器地址
joblog_clean=清理
joblog_clean_log=日誌清理
joblog_clean_type=清理方式
joblog_clean_type_1=清理一個月之前日誌資料
joblog_clean_type_2=清理三個月之前日誌資料
joblog_clean_type_3=清理六個月之前日誌資料
joblog_clean_type_4=清理一年之前日誌資料
joblog_clean_type_5=清理一千條以前日誌資料
joblog_clean_type_6=清理一萬條以前日誌資料
joblog_clean_type_7=清理三萬條以前日誌資料
joblog_clean_type_8=清理十萬條以前日誌資料
joblog_clean_type_9=清理所有日誌資料
joblog_clean_type_unvalid=清理類型參数異常
joblog_handleCode_200=成功
joblog_handleCode_500=失敗
joblog_handleCode_502=失敗(超時)
joblog_kill_log=终止任務
joblog_kill_log_limit=調度失敗,無法终止日誌
joblog_kill_log_byman=人為操作,主動終止
joblog_lost_fail=任務結果丟失,標記失敗
joblog_rolling_log=執行日誌
joblog_rolling_log_refresh=更新
joblog_rolling_log_triggerfail=任務發起調度失敗,無法查看執行日誌
joblog_rolling_log_failoften=終止請求Rolling日誌請求失敗次數超上限可刷新頁面重新加載日誌
joblog_logid_unvalid=日誌ID非法
## job group
jobgroup_name=執行器管理
jobgroup_list=執行器列表
jobgroup_add=新增執行器
jobgroup_edit=編輯執行器
jobgroup_del=刪除執行器
jobgroup_field_title=名稱
jobgroup_field_addressType=注冊方式
jobgroup_field_addressType_0=自動注冊
jobgroup_field_addressType_1=手動登錄
jobgroup_field_addressType_limit=手動登錄注冊方式,機器地址不可為空
jobgroup_field_registryList=機器地址
jobgroup_field_registryList_unvalid=機器地址格式非法
jobgroup_field_registryList_placeholder=請輸入執行器地址列表,多個地址請以逗號分隔
jobgroup_field_appname_limit=限制以小寫字母開頭,由小寫字母、數字和中划線組成
jobgroup_field_appname_length=AppName長度限制為4~64
jobgroup_field_title_length=名稱長度限制為4~12
jobgroup_field_order_digits=請輸入整數
jobgroup_field_orderrange=取值範圍為1~1000
jobgroup_del_limit_0=拒絕刪除,該執行器使用中
jobgroup_del_limit_1=拒絕删除,系统至少保留一個執行器
jobgroup_empty=不存在有效執行器,請聯絡系統管理員
## job conf
jobconf_block_SERIAL_EXECUTION=單機串行
jobconf_block_DISCARD_LATER=丢棄后續調度
jobconf_block_COVER_EARLY=覆蓋之前調度
jobconf_route_first=第一個
jobconf_route_last=最後一個
jobconf_route_round=輪詢
jobconf_route_random=隨機
jobconf_route_consistenthash=一致性HASH
jobconf_route_lfu=最不經常使用
jobconf_route_lru=最近最久未使用
jobconf_route_failover=故障轉移
jobconf_route_busyover=忙碌轉移
jobconf_route_shard=分片廣播
jobconf_idleBeat=空閒檢測
jobconf_beat=心跳檢測
jobconf_monitor=任務調度中心監控告警
jobconf_monitor_detail=監控告警明细
jobconf_monitor_alarm_title=告警類型
jobconf_monitor_alarm_type=調度失敗
jobconf_monitor_alarm_content=告警内容
jobconf_trigger_admin_adress=調度機器
jobconf_trigger_exe_regtype=執行器-注冊方式
jobconf_trigger_exe_regaddress=執行器-地址列表
jobconf_trigger_address_empty=調度失敗:執行器地址為空
jobconf_trigger_run=觸發調度
jobconf_trigger_child_run=觸發子任務
jobconf_callback_child_msg1={0}/{1} [任務ID={2}], 觸發{3}, 觸發備註: {4} <br>
jobconf_callback_child_msg2={0}/{1} [任務ID={2}], 觸發失败, 觸發備註: 任務ID格式錯誤 <br>
jobconf_trigger_type=任務觸發類型
jobconf_trigger_type_cron=Cron觸發
jobconf_trigger_type_manual=手動觸發
jobconf_trigger_type_parent=父任務觸發
jobconf_trigger_type_api=API觸發
jobconf_trigger_type_retry=失敗重試觸發
jobconf_trigger_type_misfire=調度過期補償
## user
user_manage=用户管理
user_username=帳號
user_password=密碼
user_role=角色
user_role_admin=管理員
user_role_normal=普通用戶
user_permission=權限
user_add=新增用戶
user_update=更新用戶
user_username_repeat=帳號重複
user_username_valid=限制以小寫字母開頭,由小寫字母、數字組成
user_password_update_placeholder=請輸入新密碼,為空則不更新密碼
user_update_loginuser_limit=禁止操作當前登入帳號
## help
job_help=使用教程
job_help_document=官方文件

View File

@ -1,29 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false" scan="true" scanPeriod="1 seconds">
<contextName>logback</contextName>
<property name="log.path" value="logs/xxl-job/xxl-job-admin.log"/>
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}.%d{yyyy-MM-dd}.zip</fileNamePattern>
</rollingPolicy>
<encoder>
<pattern>%date %level [%thread] %logger{36} [%file : %line] %msg%n
</pattern>
</encoder>
</appender>
<root level="info">
<appender-ref ref="console"/>
<appender-ref ref="file"/>
</root>
</configuration>

View File

@ -1,91 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xxl.job.admin.dao.XxlJobGroupDao">
<resultMap id="XxlJobGroup" type="com.xxl.job.admin.core.model.XxlJobGroup" >
<result column="id" property="id" />
<result column="app_name" property="appname" />
<result column="title" property="title" />
<result column="address_type" property="addressType" />
<result column="address_list" property="addressList" />
<result column="update_time" property="updateTime" />
</resultMap>
<sql id="Base_Column_List">
t.id,
t.app_name,
t.title,
t.address_type,
t.address_list,
t.update_time
</sql>
<select id="findAll" resultMap="XxlJobGroup">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_group AS t
ORDER BY t.app_name, t.title, t.id ASC
</select>
<select id="findByAddressType" parameterType="java.lang.Integer" resultMap="XxlJobGroup">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_group AS t
WHERE t.address_type = #{addressType}
ORDER BY t.app_name, t.title, t.id ASC
</select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobGroup" useGeneratedKeys="true" keyProperty="id" >
INSERT INTO xxl_job_group ( `app_name`, `title`, `address_type`, `address_list`, `update_time`)
values ( #{appname}, #{title}, #{addressType}, #{addressList}, #{updateTime} );
</insert>
<update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobGroup" >
UPDATE xxl_job_group
SET `app_name` = #{appname},
`title` = #{title},
`address_type` = #{addressType},
`address_list` = #{addressList},
`update_time` = #{updateTime}
WHERE id = #{id}
</update>
<delete id="remove" parameterType="java.lang.Integer" >
DELETE FROM xxl_job_group
WHERE id = #{id}
</delete>
<select id="load" parameterType="java.lang.Integer" resultMap="XxlJobGroup">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_group AS t
WHERE t.id = #{id}
</select>
<select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobGroup">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_group AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="appname != null and appname != ''">
AND t.app_name like CONCAT(CONCAT('%', #{appname}), '%')
</if>
<if test="title != null and title != ''">
AND t.title like CONCAT(CONCAT('%', #{title}), '%')
</if>
</trim>
ORDER BY t.app_name, t.title, t.id ASC
LIMIT #{offset}, #{pagesize}
</select>
<select id="pageListCount" parameterType="java.util.HashMap" resultType="int">
SELECT count(1)
FROM xxl_job_group AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="appname != null and appname != ''">
AND t.app_name like CONCAT(CONCAT('%', #{appname}), '%')
</if>
<if test="title != null and title != ''">
AND t.title like CONCAT(CONCAT('%', #{title}), '%')
</if>
</trim>
</select>
</mapper>
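
For reference, the DAO interface that this mapper namespace (com.xxl.job.admin.dao.XxlJobGroupDao) binds to can be inferred from the statement ids and the parameter/result types above. The sketch below is an approximation; the exact upstream signatures may differ.

package com.xxl.job.admin.dao;

import com.xxl.job.admin.core.model.XxlJobGroup;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

import java.util.List;

// Inferred from the mapper above: method names mirror the statement ids,
// and the @Param names mirror the #{...} placeholders. Signatures are an approximation.
@Mapper
public interface XxlJobGroupDao {

    List<XxlJobGroup> findAll();

    List<XxlJobGroup> findByAddressType(@Param("addressType") int addressType);

    int save(XxlJobGroup xxlJobGroup);

    int update(XxlJobGroup xxlJobGroup);

    int remove(@Param("id") int id);

    XxlJobGroup load(@Param("id") int id);

    List<XxlJobGroup> pageList(@Param("offset") int offset,
                               @Param("pagesize") int pagesize,
                               @Param("appname") String appname,
                               @Param("title") String title);

    int pageListCount(@Param("offset") int offset,
                      @Param("pagesize") int pagesize,
                      @Param("appname") String appname,
                      @Param("title") String title);
}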

View File

@ -1,240 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xxl.job.admin.dao.XxlJobInfoDao">
<resultMap id="XxlJobInfo" type="com.xxl.job.admin.core.model.XxlJobInfo" >
<result column="id" property="id" />
<result column="job_group" property="jobGroup" />
<result column="job_desc" property="jobDesc" />
<result column="add_time" property="addTime" />
<result column="update_time" property="updateTime" />
<result column="author" property="author" />
<result column="alarm_email" property="alarmEmail" />
<result column="schedule_type" property="scheduleType" />
<result column="schedule_conf" property="scheduleConf" />
<result column="misfire_strategy" property="misfireStrategy" />
<result column="executor_route_strategy" property="executorRouteStrategy" />
<result column="executor_handler" property="executorHandler" />
<result column="executor_param" property="executorParam" />
<result column="executor_block_strategy" property="executorBlockStrategy" />
<result column="executor_timeout" property="executorTimeout" />
<result column="executor_fail_retry_count" property="executorFailRetryCount" />
<result column="glue_type" property="glueType" />
<result column="glue_source" property="glueSource" />
<result column="glue_remark" property="glueRemark" />
<result column="glue_updatetime" property="glueUpdatetime" />
<result column="child_jobid" property="childJobId" />
<result column="trigger_status" property="triggerStatus" />
<result column="trigger_last_time" property="triggerLastTime" />
<result column="trigger_next_time" property="triggerNextTime" />
</resultMap>
<sql id="Base_Column_List">
t.id,
t.job_group,
t.job_desc,
t.add_time,
t.update_time,
t.author,
t.alarm_email,
t.schedule_type,
t.schedule_conf,
t.misfire_strategy,
t.executor_route_strategy,
t.executor_handler,
t.executor_param,
t.executor_block_strategy,
t.executor_timeout,
t.executor_fail_retry_count,
t.glue_type,
t.glue_source,
t.glue_remark,
t.glue_updatetime,
t.child_jobid,
t.trigger_status,
t.trigger_last_time,
t.trigger_next_time
</sql>
<select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobInfo">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_info AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobGroup gt 0">
AND t.job_group = #{jobGroup}
</if>
<if test="triggerStatus gte 0">
AND t.trigger_status = #{triggerStatus}
</if>
<if test="jobDesc != null and jobDesc != ''">
AND t.job_desc like CONCAT(CONCAT('%', #{jobDesc}), '%')
</if>
<if test="executorHandler != null and executorHandler != ''">
AND t.executor_handler like CONCAT(CONCAT('%', #{executorHandler}), '%')
</if>
<if test="author != null and author != ''">
AND t.author like CONCAT(CONCAT('%', #{author}), '%')
</if>
</trim>
ORDER BY id DESC
LIMIT #{offset}, #{pagesize}
</select>
<select id="pageListCount" parameterType="java.util.HashMap" resultType="int">
SELECT count(1)
FROM xxl_job_info AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobGroup gt 0">
AND t.job_group = #{jobGroup}
</if>
<if test="triggerStatus gte 0">
AND t.trigger_status = #{triggerStatus}
</if>
<if test="jobDesc != null and jobDesc != ''">
AND t.job_desc like CONCAT(CONCAT('%', #{jobDesc}), '%')
</if>
<if test="executorHandler != null and executorHandler != ''">
AND t.executor_handler like CONCAT(CONCAT('%', #{executorHandler}), '%')
</if>
<if test="author != null and author != ''">
AND t.author like CONCAT(CONCAT('%', #{author}), '%')
</if>
</trim>
</select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" useGeneratedKeys="true" keyProperty="id" >
INSERT INTO xxl_job_info (
job_group,
job_desc,
add_time,
update_time,
author,
alarm_email,
schedule_type,
schedule_conf,
misfire_strategy,
executor_route_strategy,
executor_handler,
executor_param,
executor_block_strategy,
executor_timeout,
executor_fail_retry_count,
glue_type,
glue_source,
glue_remark,
glue_updatetime,
child_jobid,
trigger_status,
trigger_last_time,
trigger_next_time
) VALUES (
#{jobGroup},
#{jobDesc},
#{addTime},
#{updateTime},
#{author},
#{alarmEmail},
#{scheduleType},
#{scheduleConf},
#{misfireStrategy},
#{executorRouteStrategy},
#{executorHandler},
#{executorParam},
#{executorBlockStrategy},
#{executorTimeout},
#{executorFailRetryCount},
#{glueType},
#{glueSource},
#{glueRemark},
#{glueUpdatetime},
#{childJobId},
#{triggerStatus},
#{triggerLastTime},
#{triggerNextTime}
);
<!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id">
SELECT LAST_INSERT_ID()
/*SELECT @@IDENTITY AS id*/
</selectKey>-->
</insert>
<select id="loadById" parameterType="java.util.HashMap" resultMap="XxlJobInfo">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_info AS t
WHERE t.id = #{id}
</select>
<update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" >
UPDATE xxl_job_info
SET
job_group = #{jobGroup},
job_desc = #{jobDesc},
update_time = #{updateTime},
author = #{author},
alarm_email = #{alarmEmail},
schedule_type = #{scheduleType},
schedule_conf = #{scheduleConf},
misfire_strategy = #{misfireStrategy},
executor_route_strategy = #{executorRouteStrategy},
executor_handler = #{executorHandler},
executor_param = #{executorParam},
executor_block_strategy = #{executorBlockStrategy},
executor_timeout = #{executorTimeout},
executor_fail_retry_count = #{executorFailRetryCount},
glue_type = #{glueType},
glue_source = #{glueSource},
glue_remark = #{glueRemark},
glue_updatetime = #{glueUpdatetime},
child_jobid = #{childJobId},
trigger_status = #{triggerStatus},
trigger_last_time = #{triggerLastTime},
trigger_next_time = #{triggerNextTime}
WHERE id = #{id}
</update>
<delete id="delete" parameterType="java.util.HashMap">
DELETE
FROM xxl_job_info
WHERE id = #{id}
</delete>
<select id="getJobsByGroup" parameterType="java.util.HashMap" resultMap="XxlJobInfo">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_info AS t
WHERE t.job_group = #{jobGroup}
</select>
<select id="findAllCount" resultType="int">
SELECT count(1)
FROM xxl_job_info
</select>
<select id="scheduleJobQuery" parameterType="java.util.HashMap" resultMap="XxlJobInfo">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_info AS t
WHERE t.trigger_status = 1
and t.trigger_next_time <![CDATA[ <= ]]> #{maxNextTime}
ORDER BY id ASC
LIMIT #{pagesize}
</select>
<update id="scheduleUpdate" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" >
UPDATE xxl_job_info
SET
trigger_last_time = #{triggerLastTime},
trigger_next_time = #{triggerNextTime},
trigger_status = #{triggerStatus}
WHERE id = #{id}
</update>
</mapper>
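
The scheduleJobQuery/scheduleUpdate pair above is what the scheduler's pre-read loop polls. Below is a minimal sketch of that loop, assuming a DAO with matching method names and the usual getters/setters on XxlJobInfo; the wrapper class, field names, window size, and the simplified next-time calculation are assumptions, not the upstream implementation.

import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.dao.XxlJobInfoDao;

import java.util.List;

// Illustrative polling sketch around the scheduleJobQuery/scheduleUpdate statements above.
// The XxlJobInfoDao field, PRE_READ_MS and PRE_READ_COUNT values are assumptions for this sketch.
public class ScheduleSketch {

    private static final long PRE_READ_MS = 5000L;
    private static final int PRE_READ_COUNT = 1000;

    private final XxlJobInfoDao xxlJobInfoDao;

    public ScheduleSketch(XxlJobInfoDao xxlJobInfoDao) {
        this.xxlJobInfoDao = xxlJobInfoDao;
    }

    /** One pre-read pass: pick jobs due within the window and push their trigger times forward. */
    public void preReadOnce() {
        long nowTime = System.currentTimeMillis();
        List<XxlJobInfo> dueJobs = xxlJobInfoDao.scheduleJobQuery(nowTime + PRE_READ_MS, PRE_READ_COUNT);
        if (dueJobs == null) {
            return;
        }
        for (XxlJobInfo jobInfo : dueJobs) {
            // trigger the job here (omitted), then persist the refreshed trigger times
            jobInfo.setTriggerLastTime(jobInfo.getTriggerNextTime());
            jobInfo.setTriggerNextTime(jobInfo.getTriggerNextTime() + PRE_READ_MS); // placeholder next-time calculation
            xxlJobInfoDao.scheduleUpdate(jobInfo);
        }
    }
}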

View File

@ -1,71 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xxl.job.admin.dao.XxlJobLogGlueDao">
<resultMap id="XxlJobLogGlue" type="com.xxl.job.admin.core.model.XxlJobLogGlue" >
<result column="id" property="id" />
<result column="job_id" property="jobId" />
<result column="glue_type" property="glueType" />
<result column="glue_source" property="glueSource" />
<result column="glue_remark" property="glueRemark" />
<result column="add_time" property="addTime" />
<result column="update_time" property="updateTime" />
</resultMap>
<sql id="Base_Column_List">
t.id,
t.job_id,
t.glue_type,
t.glue_source,
t.glue_remark,
t.add_time,
t.update_time
</sql>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLogGlue" useGeneratedKeys="true" keyProperty="id" >
INSERT INTO xxl_job_logglue (
`job_id`,
`glue_type`,
`glue_source`,
`glue_remark`,
`add_time`,
`update_time`
) VALUES (
#{jobId},
#{glueType},
#{glueSource},
#{glueRemark},
#{addTime},
#{updateTime}
);
<!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id">
SELECT LAST_INSERT_ID()
</selectKey>-->
</insert>
<select id="findByJobId" parameterType="java.lang.Integer" resultMap="XxlJobLogGlue">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_logglue AS t
WHERE t.job_id = #{jobId}
ORDER BY id DESC
</select>
<delete id="removeOld" >
DELETE FROM xxl_job_logglue
WHERE id NOT in(
SELECT id FROM(
SELECT id FROM xxl_job_logglue
WHERE `job_id` = #{jobId}
ORDER BY update_time desc
LIMIT 0, #{limit}
) t1
) AND `job_id` = #{jobId}
</delete>
<delete id="deleteByJobId" parameterType="java.lang.Integer" >
DELETE FROM xxl_job_logglue
WHERE `job_id` = #{jobId}
</delete>
</mapper>

View File

@ -1,273 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xxl.job.admin.dao.XxlJobLogDao">
<resultMap id="XxlJobLog" type="com.xxl.job.admin.core.model.XxlJobLog" >
<result column="id" property="id" />
<result column="job_group" property="jobGroup" />
<result column="job_id" property="jobId" />
<result column="executor_address" property="executorAddress" />
<result column="executor_handler" property="executorHandler" />
<result column="executor_param" property="executorParam" />
<result column="executor_sharding_param" property="executorShardingParam" />
<result column="executor_fail_retry_count" property="executorFailRetryCount" />
<result column="trigger_time" property="triggerTime" />
<result column="trigger_code" property="triggerCode" />
<result column="trigger_msg" property="triggerMsg" />
<result column="handle_time" property="handleTime" />
<result column="handle_code" property="handleCode" />
<result column="handle_msg" property="handleMsg" />
<result column="alarm_status" property="alarmStatus" />
</resultMap>
<sql id="Base_Column_List">
t.id,
t.job_group,
t.job_id,
t.executor_address,
t.executor_handler,
t.executor_param,
t.executor_sharding_param,
t.executor_fail_retry_count,
t.trigger_time,
t.trigger_code,
t.trigger_msg,
t.handle_time,
t.handle_code,
t.handle_msg,
t.alarm_status
</sql>
<select id="pageList" resultMap="XxlJobLog">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_log AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobId==0 and jobGroup gt 0">
AND t.job_group = #{jobGroup}
</if>
<if test="jobId gt 0">
AND t.job_id = #{jobId}
</if>
<if test="triggerTimeStart != null">
AND t.trigger_time <![CDATA[ >= ]]> #{triggerTimeStart}
</if>
<if test="triggerTimeEnd != null">
AND t.trigger_time <![CDATA[ <= ]]> #{triggerTimeEnd}
</if>
<if test="logStatus == 1" >
AND t.handle_code = 200
</if>
<if test="logStatus == 2" >
AND (
t.trigger_code NOT IN (0, 200) OR
t.handle_code NOT IN (0, 200)
)
</if>
<if test="logStatus == 3" >
AND t.trigger_code = 200
AND t.handle_code = 0
</if>
</trim>
ORDER BY t.trigger_time DESC
LIMIT #{offset}, #{pagesize}
</select>
<select id="pageListCount" resultType="int">
SELECT count(1)
FROM xxl_job_log AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobId==0 and jobGroup gt 0">
AND t.job_group = #{jobGroup}
</if>
<if test="jobId gt 0">
AND t.job_id = #{jobId}
</if>
<if test="triggerTimeStart != null">
AND t.trigger_time <![CDATA[ >= ]]> #{triggerTimeStart}
</if>
<if test="triggerTimeEnd != null">
AND t.trigger_time <![CDATA[ <= ]]> #{triggerTimeEnd}
</if>
<if test="logStatus == 1" >
AND t.handle_code = 200
</if>
<if test="logStatus == 2" >
AND (
t.trigger_code NOT IN (0, 200) OR
t.handle_code NOT IN (0, 200)
)
</if>
<if test="logStatus == 3" >
AND t.trigger_code = 200
AND t.handle_code = 0
</if>
</trim>
</select>
<select id="load" parameterType="java.lang.Long" resultMap="XxlJobLog">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_log AS t
WHERE t.id = #{id}
</select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLog" useGeneratedKeys="true" keyProperty="id" >
INSERT INTO xxl_job_log (
`job_group`,
`job_id`,
`trigger_time`,
`trigger_code`,
`handle_code`
) VALUES (
#{jobGroup},
#{jobId},
#{triggerTime},
#{triggerCode},
#{handleCode}
);
<!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id">
SELECT LAST_INSERT_ID()
</selectKey>-->
</insert>
<update id="updateTriggerInfo" >
UPDATE xxl_job_log
SET
`trigger_time`= #{triggerTime},
`trigger_code`= #{triggerCode},
`trigger_msg`= #{triggerMsg},
`executor_address`= #{executorAddress},
`executor_handler`=#{executorHandler},
`executor_param`= #{executorParam},
`executor_sharding_param`= #{executorShardingParam},
`executor_fail_retry_count`= #{executorFailRetryCount}
WHERE `id`= #{id}
</update>
<update id="updateHandleInfo">
UPDATE xxl_job_log
SET
`handle_time`= #{handleTime},
`handle_code`= #{handleCode},
`handle_msg`= #{handleMsg}
WHERE `id`= #{id}
</update>
<delete id="delete" >
delete from xxl_job_log
WHERE job_id = #{jobId}
</delete>
<!--<select id="triggerCountByDay" resultType="java.util.Map" >
SELECT
DATE_FORMAT(trigger_time,'%Y-%m-%d') triggerDay,
COUNT(handle_code) triggerDayCount,
SUM(CASE WHEN (trigger_code in (0, 200) and handle_code = 0) then 1 else 0 end) as triggerDayCountRunning,
SUM(CASE WHEN handle_code = 200 then 1 else 0 end) as triggerDayCountSuc
FROM xxl_job_log
WHERE trigger_time BETWEEN #{from} and #{to}
GROUP BY triggerDay
ORDER BY triggerDay
</select>-->
<select id="findLogReport" resultType="java.util.Map" >
SELECT
COUNT(handle_code) triggerDayCount,
SUM(CASE WHEN (trigger_code in (0, 200) and handle_code = 0) then 1 else 0 end) as triggerDayCountRunning,
SUM(CASE WHEN handle_code = 200 then 1 else 0 end) as triggerDayCountSuc
FROM xxl_job_log
WHERE trigger_time BETWEEN #{from} and #{to}
</select>
<select id="findClearLogIds" resultType="long" >
SELECT id FROM xxl_job_log
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobGroup gt 0">
AND job_group = #{jobGroup}
</if>
<if test="jobId gt 0">
AND job_id = #{jobId}
</if>
<if test="clearBeforeTime != null">
AND trigger_time <![CDATA[ <= ]]> #{clearBeforeTime}
</if>
<if test="clearBeforeNum gt 0">
AND id NOT in(
SELECT id FROM(
SELECT id FROM xxl_job_log AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobGroup gt 0">
AND t.job_group = #{jobGroup}
</if>
<if test="jobId gt 0">
AND t.job_id = #{jobId}
</if>
</trim>
ORDER BY t.trigger_time desc
LIMIT 0, #{clearBeforeNum}
) t1
)
</if>
</trim>
order by id asc
LIMIT #{pagesize}
</select>
<delete id="clearLog" >
delete from xxl_job_log
WHERE id in
<foreach collection="logIds" item="item" open="(" close=")" separator="," >
#{item}
</foreach>
</delete>
<select id="findFailJobLogIds" resultType="long" >
SELECT id FROM `xxl_job_log`
WHERE !(
(trigger_code in (0, 200) and handle_code = 0)
OR
(handle_code = 200)
)
AND `alarm_status` = 0
ORDER BY id ASC
LIMIT #{pagesize}
</select>
<update id="updateAlarmStatus" >
UPDATE xxl_job_log
SET
`alarm_status` = #{newAlarmStatus}
WHERE `id`= #{logId} AND `alarm_status` = #{oldAlarmStatus}
</update>
<select id="findLostJobIds" resultType="long" >
SELECT
t.id
FROM
xxl_job_log t
LEFT JOIN xxl_job_registry t2 ON t.executor_address = t2.registry_value
WHERE
t.trigger_code = 200
AND t.handle_code = 0
AND t.trigger_time <![CDATA[ <= ]]> #{losedTime}
AND t2.id IS NULL;
</select>
<!--
SELECT t.id
FROM xxl_job_log AS t
WHERE t.trigger_code = 200
and t.handle_code = 0
and t.trigger_time <![CDATA[ <= ]]> #{losedTime}
and t.executor_address not in (
SELECT t2.registry_value
FROM xxl_job_registry AS t2
)
-->
</mapper>
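
The findClearLogIds/clearLog pair above backs the paged log cleanup behind the joblog_clean_type_* options listed earlier. A minimal sketch of the batch loop, assuming a DAO with matching method names; the wrapper class and the page size of 1000 are assumptions.

import com.xxl.job.admin.dao.XxlJobLogDao;

import java.util.Date;
import java.util.List;

// Illustrative batch cleanup over the findClearLogIds/clearLog statements above.
// The DAO field and the page size of 1000 are assumptions for this sketch.
public class LogCleanSketch {

    private final XxlJobLogDao xxlJobLogDao;

    public LogCleanSketch(XxlJobLogDao xxlJobLogDao) {
        this.xxlJobLogDao = xxlJobLogDao;
    }

    /** Deletes matching log rows in pages of 1000 ids until nothing is left to clear. */
    public void clean(int jobGroup, int jobId, Date clearBeforeTime, int clearBeforeNum) {
        List<Long> logIds;
        do {
            logIds = xxlJobLogDao.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
            if (logIds != null && !logIds.isEmpty()) {
                xxlJobLogDao.clearLog(logIds);
            }
        } while (logIds != null && logIds.size() >= 1000);
    }
}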

View File

@ -1,62 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xxl.job.admin.dao.XxlJobLogReportDao">
<resultMap id="XxlJobLogReport" type="com.xxl.job.admin.core.model.XxlJobLogReport" >
<result column="id" property="id" />
<result column="trigger_day" property="triggerDay" />
<result column="running_count" property="runningCount" />
<result column="suc_count" property="sucCount" />
<result column="fail_count" property="failCount" />
</resultMap>
<sql id="Base_Column_List">
t.id,
t.trigger_day,
t.running_count,
t.suc_count,
t.fail_count
</sql>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLogReport" useGeneratedKeys="true" keyProperty="id" >
INSERT INTO xxl_job_log_report (
`trigger_day`,
`running_count`,
`suc_count`,
`fail_count`
) VALUES (
#{triggerDay},
#{runningCount},
#{sucCount},
#{failCount}
);
<!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id">
SELECT LAST_INSERT_ID()
</selectKey>-->
</insert>
<update id="update" >
UPDATE xxl_job_log_report
SET `running_count` = #{runningCount},
`suc_count` = #{sucCount},
`fail_count` = #{failCount}
WHERE `trigger_day` = #{triggerDay}
</update>
<select id="queryLogReport" resultMap="XxlJobLogReport">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_log_report AS t
WHERE t.trigger_day between #{triggerDayFrom} and #{triggerDayTo}
ORDER BY t.trigger_day ASC
</select>
<select id="queryLogReportTotal" resultMap="XxlJobLogReport">
SELECT
SUM(running_count) running_count,
SUM(suc_count) suc_count,
SUM(fail_count) fail_count
FROM xxl_job_log_report AS t
</select>
</mapper>

View File

@ -1,62 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xxl.job.admin.dao.XxlJobRegistryDao">
<resultMap id="XxlJobRegistry" type="com.xxl.job.admin.core.model.XxlJobRegistry" >
<result column="id" property="id" />
<result column="registry_group" property="registryGroup" />
<result column="registry_key" property="registryKey" />
<result column="registry_value" property="registryValue" />
<result column="update_time" property="updateTime" />
</resultMap>
<sql id="Base_Column_List">
t.id,
t.registry_group,
t.registry_key,
t.registry_value,
t.update_time
</sql>
<select id="findDead" parameterType="java.util.HashMap" resultType="java.lang.Integer" >
SELECT t.id
FROM xxl_job_registry AS t
WHERE t.update_time <![CDATA[ < ]]> DATE_ADD(#{nowTime},INTERVAL -#{timeout} SECOND)
</select>
<delete id="removeDead" parameterType="java.lang.Integer" >
DELETE FROM xxl_job_registry
WHERE id in
<foreach collection="ids" item="item" open="(" close=")" separator="," >
#{item}
</foreach>
</delete>
<select id="findAll" parameterType="java.util.HashMap" resultMap="XxlJobRegistry">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_registry AS t
WHERE t.update_time <![CDATA[ > ]]> DATE_ADD(#{nowTime},INTERVAL -#{timeout} SECOND)
</select>
<update id="registryUpdate" >
UPDATE xxl_job_registry
SET `update_time` = #{updateTime}
WHERE `registry_group` = #{registryGroup}
AND `registry_key` = #{registryKey}
AND `registry_value` = #{registryValue}
</update>
<insert id="registrySave" >
INSERT INTO xxl_job_registry( `registry_group` , `registry_key` , `registry_value`, `update_time`)
VALUES( #{registryGroup} , #{registryKey} , #{registryValue}, #{updateTime})
</insert>
<delete id="registryDelete" >
DELETE FROM xxl_job_registry
WHERE registry_group = #{registryGroup}
AND registry_key = #{registryKey}
AND registry_value = #{registryValue}
</delete>
</mapper>
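
findDead/removeDead above implement the heartbeat-timeout sweep that clears stale executor registrations. A minimal sketch of one sweep pass, assuming a DAO with matching method names; the wrapper class and the 90-second timeout are assumptions.

import com.xxl.job.admin.dao.XxlJobRegistryDao;

import java.util.Date;
import java.util.List;

// Illustrative sweep over the findDead/removeDead statements above.
// The DAO field and the 90-second timeout are assumptions for this sketch.
public class RegistrySweepSketch {

    private final XxlJobRegistryDao xxlJobRegistryDao;

    public RegistrySweepSketch(XxlJobRegistryDao xxlJobRegistryDao) {
        this.xxlJobRegistryDao = xxlJobRegistryDao;
    }

    /** Removes registry rows whose last heartbeat is older than the timeout window. */
    public void sweepOnce() {
        int timeoutSeconds = 90;
        List<Integer> deadIds = xxlJobRegistryDao.findDead(timeoutSeconds, new Date());
        if (deadIds != null && !deadIds.isEmpty()) {
            xxlJobRegistryDao.removeDead(deadIds);
        }
    }
}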

View File

@ -1,87 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xxl.job.admin.dao.XxlJobUserDao">
<resultMap id="XxlJobUser" type="com.xxl.job.admin.core.model.XxlJobUser" >
<result column="id" property="id" />
<result column="username" property="username" />
<result column="password" property="password" />
<result column="role" property="role" />
<result column="permission" property="permission" />
</resultMap>
<sql id="Base_Column_List">
t.id,
t.username,
t.password,
t.role,
t.permission
</sql>
<select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobUser">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_user AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="username != null and username != ''">
AND t.username like CONCAT(CONCAT('%', #{username}), '%')
</if>
<if test="role gt -1">
AND t.role = #{role}
</if>
</trim>
ORDER BY username ASC
LIMIT #{offset}, #{pagesize}
</select>
<select id="pageListCount" parameterType="java.util.HashMap" resultType="int">
SELECT count(1)
FROM xxl_job_user AS t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="username != null and username != ''">
AND t.username like CONCAT(CONCAT('%', #{username}), '%')
</if>
<if test="role gt -1">
AND t.role = #{role}
</if>
</trim>
</select>
<select id="loadByUserName" parameterType="java.util.HashMap" resultMap="XxlJobUser">
SELECT <include refid="Base_Column_List" />
FROM xxl_job_user AS t
WHERE t.username = #{username}
</select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobUser" useGeneratedKeys="true" keyProperty="id" >
INSERT INTO xxl_job_user (
username,
password,
role,
permission
) VALUES (
#{username},
#{password},
#{role},
#{permission}
);
</insert>
<update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobUser" >
UPDATE xxl_job_user
SET
<if test="password != null and password != ''">
password = #{password},
</if>
role = #{role},
permission = #{permission}
WHERE id = #{id}
</update>
<delete id="delete" parameterType="java.util.HashMap">
DELETE
FROM xxl_job_user
WHERE id = #{id}
</delete>
</mapper>

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

Image file removed (326 KiB)

File diff suppressed because one or more lines are too long

View File

@ -1,77 +0,0 @@
/* This is a compiled file, you should be editing the file in the templates directory */
.pace {
-webkit-pointer-events: none;
pointer-events: none;
-webkit-user-select: none;
-moz-user-select: none;
user-select: none;
}
.pace-inactive {
display: none;
}
.pace .pace-progress {
background: #2299dd;
position: fixed;
z-index: 2000;
top: 0;
right: 100%;
width: 100%;
height: 2px;
}
.pace .pace-progress-inner {
display: block;
position: absolute;
right: 0px;
width: 100px;
height: 100%;
box-shadow: 0 0 10px #2299dd, 0 0 5px #2299dd;
opacity: 1.0;
-webkit-transform: rotate(3deg) translate(0px, -4px);
-moz-transform: rotate(3deg) translate(0px, -4px);
-ms-transform: rotate(3deg) translate(0px, -4px);
-o-transform: rotate(3deg) translate(0px, -4px);
transform: rotate(3deg) translate(0px, -4px);
}
.pace .pace-activity {
display: block;
position: fixed;
z-index: 2000;
top: 15px;
right: 15px;
width: 14px;
height: 14px;
border: solid 2px transparent;
border-top-color: #2299dd;
border-left-color: #2299dd;
border-radius: 10px;
-webkit-animation: pace-spinner 400ms linear infinite;
-moz-animation: pace-spinner 400ms linear infinite;
-ms-animation: pace-spinner 400ms linear infinite;
-o-animation: pace-spinner 400ms linear infinite;
animation: pace-spinner 400ms linear infinite;
}
@-webkit-keyframes pace-spinner {
0% { -webkit-transform: rotate(0deg); transform: rotate(0deg); }
100% { -webkit-transform: rotate(360deg); transform: rotate(360deg); }
}
@-moz-keyframes pace-spinner {
0% { -moz-transform: rotate(0deg); transform: rotate(0deg); }
100% { -moz-transform: rotate(360deg); transform: rotate(360deg); }
}
@-o-keyframes pace-spinner {
0% { -o-transform: rotate(0deg); transform: rotate(0deg); }
100% { -o-transform: rotate(360deg); transform: rotate(360deg); }
}
@-ms-keyframes pace-spinner {
0% { -ms-transform: rotate(0deg); transform: rotate(0deg); }
100% { -ms-transform: rotate(360deg); transform: rotate(360deg); }
}
@keyframes pace-spinner {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}

View File

@ -1,269 +0,0 @@
.daterangepicker {
position: absolute;
color: inherit;
background-color: #fff;
border-radius: 4px;
width: 278px;
padding: 4px;
margin-top: 1px;
top: 100px;
left: 20px;
/* Calendars */ }
.daterangepicker:before, .daterangepicker:after {
position: absolute;
display: inline-block;
border-bottom-color: rgba(0, 0, 0, 0.2);
content: ''; }
.daterangepicker:before {
top: -7px;
border-right: 7px solid transparent;
border-left: 7px solid transparent;
border-bottom: 7px solid #ccc; }
.daterangepicker:after {
top: -6px;
border-right: 6px solid transparent;
border-bottom: 6px solid #fff;
border-left: 6px solid transparent; }
.daterangepicker.opensleft:before {
right: 9px; }
.daterangepicker.opensleft:after {
right: 10px; }
.daterangepicker.openscenter:before {
left: 0;
right: 0;
width: 0;
margin-left: auto;
margin-right: auto; }
.daterangepicker.openscenter:after {
left: 0;
right: 0;
width: 0;
margin-left: auto;
margin-right: auto; }
.daterangepicker.opensright:before {
left: 9px; }
.daterangepicker.opensright:after {
left: 10px; }
.daterangepicker.dropup {
margin-top: -5px; }
.daterangepicker.dropup:before {
top: initial;
bottom: -7px;
border-bottom: initial;
border-top: 7px solid #ccc; }
.daterangepicker.dropup:after {
top: initial;
bottom: -6px;
border-bottom: initial;
border-top: 6px solid #fff; }
.daterangepicker.dropdown-menu {
max-width: none;
z-index: 3001; }
.daterangepicker.single .ranges, .daterangepicker.single .calendar {
float: none; }
.daterangepicker.show-calendar .calendar {
display: block; }
.daterangepicker .calendar {
display: none;
max-width: 270px;
margin: 4px; }
.daterangepicker .calendar.single .calendar-table {
border: none; }
.daterangepicker .calendar th, .daterangepicker .calendar td {
white-space: nowrap;
text-align: center;
min-width: 32px; }
.daterangepicker .calendar-table {
border: 1px solid #fff;
padding: 4px;
border-radius: 4px;
background-color: #fff; }
.daterangepicker table {
width: 100%;
margin: 0; }
.daterangepicker td, .daterangepicker th {
text-align: center;
width: 20px;
height: 20px;
border-radius: 4px;
border: 1px solid transparent;
white-space: nowrap;
cursor: pointer; }
.daterangepicker td.available:hover, .daterangepicker th.available:hover {
background-color: #eee;
border-color: transparent;
color: inherit; }
.daterangepicker td.week, .daterangepicker th.week {
font-size: 80%;
color: #ccc; }
.daterangepicker td.off, .daterangepicker td.off.in-range, .daterangepicker td.off.start-date, .daterangepicker td.off.end-date {
background-color: #fff;
border-color: transparent;
color: #999; }
.daterangepicker td.in-range {
background-color: #ebf4f8;
border-color: transparent;
color: #000;
border-radius: 0; }
.daterangepicker td.start-date {
border-radius: 4px 0 0 4px; }
.daterangepicker td.end-date {
border-radius: 0 4px 4px 0; }
.daterangepicker td.start-date.end-date {
border-radius: 4px; }
.daterangepicker td.active, .daterangepicker td.active:hover {
background-color: #357ebd;
border-color: transparent;
color: #fff; }
.daterangepicker th.month {
width: auto; }
.daterangepicker td.disabled, .daterangepicker option.disabled {
color: #999;
cursor: not-allowed;
text-decoration: line-through; }
.daterangepicker select.monthselect, .daterangepicker select.yearselect {
font-size: 12px;
padding: 1px;
height: auto;
margin: 0;
cursor: default; }
.daterangepicker select.monthselect {
margin-right: 2%;
width: 56%; }
.daterangepicker select.yearselect {
width: 40%; }
.daterangepicker select.hourselect, .daterangepicker select.minuteselect, .daterangepicker select.secondselect, .daterangepicker select.ampmselect {
width: 50px;
margin-bottom: 0; }
.daterangepicker .input-mini {
border: 1px solid #ccc;
border-radius: 4px;
color: #555;
height: 30px;
line-height: 30px;
display: block;
vertical-align: middle;
margin: 0 0 5px 0;
padding: 0 6px 0 28px;
width: 100%; }
.daterangepicker .input-mini.active {
border: 1px solid #08c;
border-radius: 4px; }
.daterangepicker .daterangepicker_input {
position: relative; }
.daterangepicker .daterangepicker_input i {
position: absolute;
left: 8px;
top: 8px; }
.daterangepicker.rtl .input-mini {
padding-right: 28px;
padding-left: 6px; }
.daterangepicker.rtl .daterangepicker_input i {
left: auto;
right: 8px; }
.daterangepicker .calendar-time {
text-align: center;
margin: 5px auto;
line-height: 30px;
position: relative;
padding-left: 28px; }
.daterangepicker .calendar-time select.disabled {
color: #ccc;
cursor: not-allowed; }
.ranges {
font-size: 11px;
float: none;
margin: 4px;
text-align: left; }
.ranges ul {
list-style: none;
margin: 0 auto;
padding: 0;
width: 100%; }
.ranges li {
font-size: 13px;
background-color: #f5f5f5;
border: 1px solid #f5f5f5;
border-radius: 4px;
color: #08c;
padding: 3px 12px;
margin-bottom: 8px;
cursor: pointer; }
.ranges li:hover {
background-color: #08c;
border: 1px solid #08c;
color: #fff; }
.ranges li.active {
background-color: #08c;
border: 1px solid #08c;
color: #fff; }
/* Larger Screen Styling */
@media (min-width: 564px) {
.daterangepicker {
width: auto; }
.daterangepicker .ranges ul {
width: 160px; }
.daterangepicker.single .ranges ul {
width: 100%; }
.daterangepicker.single .calendar.left {
clear: none; }
.daterangepicker.single.ltr .ranges, .daterangepicker.single.ltr .calendar {
float: left; }
.daterangepicker.single.rtl .ranges, .daterangepicker.single.rtl .calendar {
float: right; }
.daterangepicker.ltr {
direction: ltr;
text-align: left; }
.daterangepicker.ltr .calendar.left {
clear: left;
margin-right: 0; }
.daterangepicker.ltr .calendar.left .calendar-table {
border-right: none;
border-top-right-radius: 0;
border-bottom-right-radius: 0; }
.daterangepicker.ltr .calendar.right {
margin-left: 0; }
.daterangepicker.ltr .calendar.right .calendar-table {
border-left: none;
border-top-left-radius: 0;
border-bottom-left-radius: 0; }
.daterangepicker.ltr .left .daterangepicker_input {
padding-right: 12px; }
.daterangepicker.ltr .calendar.left .calendar-table {
padding-right: 12px; }
.daterangepicker.ltr .ranges, .daterangepicker.ltr .calendar {
float: left; }
.daterangepicker.rtl {
direction: rtl;
text-align: right; }
.daterangepicker.rtl .calendar.left {
clear: right;
margin-left: 0; }
.daterangepicker.rtl .calendar.left .calendar-table {
border-left: none;
border-top-left-radius: 0;
border-bottom-left-radius: 0; }
.daterangepicker.rtl .calendar.right {
margin-right: 0; }
.daterangepicker.rtl .calendar.right .calendar-table {
border-right: none;
border-top-right-radius: 0;
border-bottom-right-radius: 0; }
.daterangepicker.rtl .left .daterangepicker_input {
padding-left: 12px; }
.daterangepicker.rtl .calendar.left .calendar-table {
padding-left: 12px; }
.daterangepicker.rtl .ranges, .daterangepicker.rtl .calendar {
text-align: right;
float: right; } }
@media (min-width: 730px) {
.daterangepicker .ranges {
width: auto; }
.daterangepicker.ltr .ranges {
float: left; }
.daterangepicker.rtl .ranges {
float: right; }
.daterangepicker .calendar.left {
clear: none !important; } }

Some files were not shown because too many files have changed in this diff