Pre Merge pull request !19 from catoop/2.2.0-oracle

pull/19/MERGE
catoop 5 years ago committed by Gitee
commit 0791fe3900

.gitignore vendored

@@ -1,7 +1,32 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**
!**/src/test/**
.idea
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
target/
.DS_Store
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
.gitattributes
### VS Code ###
.vscode/

@@ -8,7 +8,7 @@ use `xxl_job`;
SET NAMES utf8mb4;
CREATE TABLE `xxl_job_info` (
-`id` int(11) NOT NULL AUTO_INCREMENT,
+`id` bigint(20) NOT NULL,
`job_group` int(11) NOT NULL COMMENT '执行器主键ID',
`job_cron` varchar(128) NOT NULL COMMENT '任务执行CRON',
`job_desc` varchar(255) NOT NULL,
@@ -34,7 +34,7 @@ CREATE TABLE `xxl_job_info` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `xxl_job_log` (
-`id` bigint(20) NOT NULL AUTO_INCREMENT,
+`id` bigint(20) NOT NULL,
`job_group` int(11) NOT NULL COMMENT '执行器主键ID',
`job_id` int(11) NOT NULL COMMENT '任务主键ID',
`executor_address` varchar(255) DEFAULT NULL COMMENT '执行器地址,本次执行的地址',
@@ -55,7 +55,7 @@ CREATE TABLE `xxl_job_log` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `xxl_job_log_report` (
-`id` int(11) NOT NULL AUTO_INCREMENT,
+`id` bigint(20) NOT NULL,
`trigger_day` datetime DEFAULT NULL COMMENT '调度-时间',
`running_count` int(11) NOT NULL DEFAULT '0' COMMENT '运行中-日志数量',
`suc_count` int(11) NOT NULL DEFAULT '0' COMMENT '执行成功-日志数量',
@@ -65,7 +65,7 @@ CREATE TABLE `xxl_job_log_report` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `xxl_job_logglue` (
-`id` int(11) NOT NULL AUTO_INCREMENT,
+`id` bigint(20) NOT NULL,
`job_id` int(11) NOT NULL COMMENT '任务主键ID',
`glue_type` varchar(50) DEFAULT NULL COMMENT 'GLUE类型',
`glue_source` mediumtext COMMENT 'GLUE源代码',
@@ -76,7 +76,7 @@ CREATE TABLE `xxl_job_logglue` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `xxl_job_registry` (
-`id` int(11) NOT NULL AUTO_INCREMENT,
+`id` bigint(20) NOT NULL,
`registry_group` varchar(50) NOT NULL,
`registry_key` varchar(255) NOT NULL,
`registry_value` varchar(255) NOT NULL,
@@ -86,7 +86,7 @@ CREATE TABLE `xxl_job_registry` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `xxl_job_group` (
-`id` int(11) NOT NULL AUTO_INCREMENT,
+`id` bigint(20) NOT NULL,
`app_name` varchar(64) NOT NULL COMMENT '执行器AppName',
`title` varchar(12) NOT NULL COMMENT '执行器名称',
`address_type` tinyint(4) NOT NULL DEFAULT '0' COMMENT '执行器地址类型0=自动注册、1=手动录入',
@@ -95,7 +95,7 @@ CREATE TABLE `xxl_job_group` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `xxl_job_user` (
-`id` int(11) NOT NULL AUTO_INCREMENT,
+`id` bigint(20) NOT NULL,
`username` varchar(50) NOT NULL COMMENT '账号',
`password` varchar(50) NOT NULL COMMENT '密码',
`role` tinyint(4) NOT NULL COMMENT '角色0-普通用户、1-管理员',
@@ -109,6 +109,15 @@ CREATE TABLE `xxl_job_lock` (
PRIMARY KEY (`lock_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+CREATE TABLE `xxl_job_machine` (
+`machine_ip` varchar(22) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT '主机IP',
+`machine_id` int(4) NOT NULL COMMENT '主机IP对应的机器码',
+`add_time` datetime NOT NULL COMMENT '创建时间',
+`heart_last_time` datetime NOT NULL COMMENT '最后一次心跳时间',
+PRIMARY KEY (`machine_ip`) USING BTREE,
+UNIQUE INDEX `index_machine_id`(`machine_id`) USING BTREE
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
INSERT INTO `xxl_job_group`(`id`, `app_name`, `title`, `address_type`, `address_list`) VALUES (1, 'xxl-job-executor-sample', '示例执行器', 0, NULL);
INSERT INTO `xxl_job_info`(`id`, `job_group`, `job_cron`, `job_desc`, `add_time`, `update_time`, `author`, `alarm_email`, `executor_route_strategy`, `executor_handler`, `executor_param`, `executor_block_strategy`, `executor_timeout`, `executor_fail_retry_count`, `glue_type`, `glue_source`, `glue_remark`, `glue_updatetime`, `child_jobid`) VALUES (1, 1, '0 0 0 * * ? *', '测试任务1', '2018-11-03 22:21:31', '2018-11-03 22:21:31', 'XXL', '', 'FIRST', 'demoJobHandler', '', 'SERIAL_EXECUTION', 0, 0, 'BEAN', '', 'GLUE代码初始化', '2018-11-03 22:21:31', '');

@@ -0,0 +1,296 @@
/*
Target Server Type : Oracle
Date: 29/06/2020 10:24:23
*/
-- ----------------------------
-- Table structure for XXL_JOB_GROUP
-- ----------------------------
DROP TABLE XXL_JOB_GROUP;
CREATE TABLE XXL_JOB_GROUP (
ID NUMBER(20) NOT NULL ,
APP_NAME NVARCHAR2(64) NOT NULL ,
TITLE NVARCHAR2(12) NOT NULL ,
ADDRESS_TYPE NUMBER(4) NOT NULL ,
ADDRESS_LIST NVARCHAR2(512)
);
COMMENT ON COLUMN XXL_JOB_GROUP.APP_NAME IS '执行器AppName';
COMMENT ON COLUMN XXL_JOB_GROUP.TITLE IS '执行器名称';
COMMENT ON COLUMN XXL_JOB_GROUP.ADDRESS_TYPE IS '执行器地址类型0=自动注册、1=手动录入';
COMMENT ON COLUMN XXL_JOB_GROUP.ADDRESS_LIST IS '执行器地址列表,多地址逗号分隔';
-- ----------------------------
-- Records of XXL_JOB_GROUP
-- ----------------------------
INSERT INTO XXL_JOB_GROUP VALUES ('1', 'xxl-job-executor-sample', '示例执行器', '0', NULL);
-- ----------------------------
-- Primary Key structure for table XXL_JOB_GROUP
-- ----------------------------
ALTER TABLE XXL_JOB_GROUP ADD CONSTRAINT SYS_C0026588 PRIMARY KEY (ID);
-- ----------------------------
-- Table structure for XXL_JOB_INFO
-- ----------------------------
DROP TABLE XXL_JOB_INFO;
CREATE TABLE XXL_JOB_INFO (
ID NUMBER(20) NOT NULL ,
JOB_GROUP NUMBER(11) NOT NULL ,
JOB_CRON NVARCHAR2(128) NOT NULL ,
JOB_DESC NVARCHAR2(255) NOT NULL ,
ADD_TIME DATE ,
UPDATE_TIME DATE ,
AUTHOR NVARCHAR2(64) ,
ALARM_EMAIL NVARCHAR2(255) ,
EXECUTOR_ROUTE_STRATEGY NVARCHAR2(50) ,
EXECUTOR_HANDLER NVARCHAR2(255) ,
EXECUTOR_PARAM NVARCHAR2(512) ,
EXECUTOR_BLOCK_STRATEGY NVARCHAR2(50) ,
EXECUTOR_TIMEOUT NUMBER(11) NOT NULL ,
EXECUTOR_FAIL_RETRY_COUNT NUMBER(11) NOT NULL ,
GLUE_TYPE NVARCHAR2(50) NOT NULL ,
GLUE_SOURCE NCLOB ,
GLUE_REMARK NVARCHAR2(128) ,
GLUE_UPDATETIME DATE ,
CHILD_JOBID NVARCHAR2(255) ,
TRIGGER_STATUS NUMBER(4) NOT NULL ,
TRIGGER_LAST_TIME NUMBER(20) NOT NULL ,
TRIGGER_NEXT_TIME NUMBER(20) NOT NULL
);
COMMENT ON COLUMN XXL_JOB_INFO.JOB_GROUP IS '执行器主键ID';
COMMENT ON COLUMN XXL_JOB_INFO.JOB_CRON IS '任务执行CRON';
COMMENT ON COLUMN XXL_JOB_INFO.AUTHOR IS '作者';
COMMENT ON COLUMN XXL_JOB_INFO.ALARM_EMAIL IS '报警邮件';
COMMENT ON COLUMN XXL_JOB_INFO.EXECUTOR_ROUTE_STRATEGY IS '执行器路由策略';
COMMENT ON COLUMN XXL_JOB_INFO.EXECUTOR_HANDLER IS '执行器任务handler';
COMMENT ON COLUMN XXL_JOB_INFO.EXECUTOR_PARAM IS '执行器任务参数';
COMMENT ON COLUMN XXL_JOB_INFO.EXECUTOR_BLOCK_STRATEGY IS '阻塞处理策略';
COMMENT ON COLUMN XXL_JOB_INFO.EXECUTOR_TIMEOUT IS '任务执行超时时间,单位秒';
COMMENT ON COLUMN XXL_JOB_INFO.EXECUTOR_FAIL_RETRY_COUNT IS '失败重试次数';
COMMENT ON COLUMN XXL_JOB_INFO.GLUE_TYPE IS 'GLUE类型';
COMMENT ON COLUMN XXL_JOB_INFO.GLUE_SOURCE IS 'GLUE源代码';
COMMENT ON COLUMN XXL_JOB_INFO.GLUE_REMARK IS 'GLUE备注';
COMMENT ON COLUMN XXL_JOB_INFO.GLUE_UPDATETIME IS 'GLUE更新时间';
COMMENT ON COLUMN XXL_JOB_INFO.CHILD_JOBID IS '子任务ID多个逗号分隔';
COMMENT ON COLUMN XXL_JOB_INFO.TRIGGER_STATUS IS '调度状态0-停止1-运行';
COMMENT ON COLUMN XXL_JOB_INFO.TRIGGER_LAST_TIME IS '上次调度时间';
COMMENT ON COLUMN XXL_JOB_INFO.TRIGGER_NEXT_TIME IS '下次调度时间';
-- ----------------------------
-- Records of XXL_JOB_INFO
-- ----------------------------
INSERT INTO XXL_JOB_INFO VALUES ('1', '1', '0 0 0 * * ? *', '测试任务1', TO_DATE('2018-11-03 22:21:31', 'SYYYY-MM-DD HH24:MI:SS'), TO_DATE('2018-11-03 22:21:31', 'SYYYY-MM-DD HH24:MI:SS'), 'XXL', NULL, 'FIRST', 'demoJobHandler', NULL, 'SERIAL_EXECUTION', '0', '0', 'BEAN', NULL, 'GLUE代码初始化', TO_DATE('2018-11-03 22:21:31', 'SYYYY-MM-DD HH24:MI:SS'), NULL, '0', '0', '0');
-- ----------------------------
-- Primary Key structure for table XXL_JOB_INFO
-- ----------------------------
ALTER TABLE XXL_JOB_INFO ADD CONSTRAINT SYS_C0026589 PRIMARY KEY (ID);
-- ----------------------------
-- Table structure for XXL_JOB_LOCK
-- ----------------------------
DROP TABLE XXL_JOB_LOCK;
CREATE TABLE XXL_JOB_LOCK (
LOCK_NAME NVARCHAR2(50) NOT NULL
);
COMMENT ON COLUMN XXL_JOB_LOCK.LOCK_NAME IS '锁名称';
-- ----------------------------
-- Records of XXL_JOB_LOCK
-- ----------------------------
INSERT INTO XXL_JOB_LOCK VALUES ('schedule_lock');
-- ----------------------------
-- Table structure for XXL_JOB_LOG
-- ----------------------------
DROP TABLE XXL_JOB_LOG;
CREATE TABLE XXL_JOB_LOG (
ID NUMBER(20) NOT NULL ,
JOB_GROUP NUMBER(11) NOT NULL ,
JOB_ID NUMBER(11) NOT NULL ,
EXECUTOR_ADDRESS NVARCHAR2(255) ,
EXECUTOR_HANDLER NVARCHAR2(255) ,
EXECUTOR_PARAM NVARCHAR2(512) ,
EXECUTOR_SHARDING_PARAM NVARCHAR2(20) ,
EXECUTOR_FAIL_RETRY_COUNT NUMBER(11) NOT NULL ,
TRIGGER_TIME DATE ,
TRIGGER_CODE NUMBER(11) NOT NULL ,
TRIGGER_MSG NCLOB ,
HANDLE_TIME DATE ,
HANDLE_CODE NUMBER(11) NOT NULL ,
HANDLE_MSG NCLOB ,
ALARM_STATUS NUMBER(4) NOT NULL
);
COMMENT ON COLUMN XXL_JOB_LOG.JOB_GROUP IS '执行器主键ID';
COMMENT ON COLUMN XXL_JOB_LOG.JOB_ID IS '任务主键ID';
COMMENT ON COLUMN XXL_JOB_LOG.EXECUTOR_ADDRESS IS '执行器地址,本次执行的地址';
COMMENT ON COLUMN XXL_JOB_LOG.EXECUTOR_HANDLER IS '执行器任务handler';
COMMENT ON COLUMN XXL_JOB_LOG.EXECUTOR_PARAM IS '执行器任务参数';
COMMENT ON COLUMN XXL_JOB_LOG.EXECUTOR_SHARDING_PARAM IS '执行器任务分片参数,格式如 1/2';
COMMENT ON COLUMN XXL_JOB_LOG.EXECUTOR_FAIL_RETRY_COUNT IS '失败重试次数';
COMMENT ON COLUMN XXL_JOB_LOG.TRIGGER_TIME IS '调度-时间';
COMMENT ON COLUMN XXL_JOB_LOG.TRIGGER_CODE IS '调度-结果';
COMMENT ON COLUMN XXL_JOB_LOG.TRIGGER_MSG IS '调度-日志';
COMMENT ON COLUMN XXL_JOB_LOG.HANDLE_TIME IS '执行-时间';
COMMENT ON COLUMN XXL_JOB_LOG.HANDLE_CODE IS '执行-状态';
COMMENT ON COLUMN XXL_JOB_LOG.HANDLE_MSG IS '执行-日志';
COMMENT ON COLUMN XXL_JOB_LOG.ALARM_STATUS IS '告警状态0-默认、1-无需告警、2-告警成功、3-告警失败';
-- ----------------------------
-- Primary Key structure for table XXL_JOB_LOG
-- ----------------------------
ALTER TABLE XXL_JOB_LOG ADD CONSTRAINT SYS_C0026591 PRIMARY KEY (ID);
-- ----------------------------
-- Indexes structure for table XXL_JOB_LOG
-- ----------------------------
CREATE INDEX I_HANDLE_CODE
ON XXL_JOB_LOG (HANDLE_CODE ASC);
CREATE INDEX I_TRIGGER_TIME
ON XXL_JOB_LOG (TRIGGER_TIME ASC);
-- ----------------------------
-- Table structure for XXL_JOB_LOG_REPORT
-- ----------------------------
DROP TABLE XXL_JOB_LOG_REPORT;
CREATE TABLE XXL_JOB_LOG_REPORT (
ID NUMBER(20) NOT NULL ,
TRIGGER_DAY DATE ,
RUNNING_COUNT NUMBER(11) NOT NULL ,
SUC_COUNT NUMBER(11) NOT NULL ,
FAIL_COUNT NUMBER(11) NOT NULL
);
COMMENT ON COLUMN XXL_JOB_LOG_REPORT.TRIGGER_DAY IS '调度-时间';
COMMENT ON COLUMN XXL_JOB_LOG_REPORT.RUNNING_COUNT IS '运行中-日志数量';
COMMENT ON COLUMN XXL_JOB_LOG_REPORT.SUC_COUNT IS '执行成功-日志数量';
COMMENT ON COLUMN XXL_JOB_LOG_REPORT.FAIL_COUNT IS '执行失败-日志数量';
-- ----------------------------
-- Primary Key structure for table XXL_JOB_LOG_REPORT
-- ----------------------------
ALTER TABLE XXL_JOB_LOG_REPORT ADD CONSTRAINT SYS_C0026592 PRIMARY KEY (ID);
-- ----------------------------
-- Indexes structure for table XXL_JOB_LOG_REPORT
-- ----------------------------
CREATE UNIQUE INDEX I_TRIGGER_DAY
ON XXL_JOB_LOG_REPORT (TRIGGER_DAY ASC);
-- ----------------------------
-- Table structure for XXL_JOB_LOGGLUE
-- ----------------------------
DROP TABLE XXL_JOB_LOGGLUE;
CREATE TABLE XXL_JOB_LOGGLUE (
ID NUMBER(20) NOT NULL ,
JOB_ID NUMBER(20) NOT NULL ,
GLUE_TYPE NVARCHAR2(50) ,
GLUE_SOURCE NCLOB ,
GLUE_REMARK NVARCHAR2(128) NOT NULL ,
ADD_TIME DATE ,
UPDATE_TIME DATE
);
COMMENT ON COLUMN XXL_JOB_LOGGLUE.JOB_ID IS '任务主键ID';
COMMENT ON COLUMN XXL_JOB_LOGGLUE.GLUE_TYPE IS 'GLUE类型';
COMMENT ON COLUMN XXL_JOB_LOGGLUE.GLUE_SOURCE IS 'GLUE源代码';
COMMENT ON COLUMN XXL_JOB_LOGGLUE.GLUE_REMARK IS 'GLUE备注';
-- ----------------------------
-- Primary Key structure for table XXL_JOB_LOGGLUE
-- ----------------------------
ALTER TABLE XXL_JOB_LOGGLUE ADD CONSTRAINT SYS_C0026593 PRIMARY KEY (ID);
-- ----------------------------
-- Table structure for XXL_JOB_REGISTRY
-- ----------------------------
DROP TABLE XXL_JOB_REGISTRY;
CREATE TABLE XXL_JOB_REGISTRY (
ID NUMBER(20) NOT NULL ,
REGISTRY_GROUP NVARCHAR2(50) NOT NULL ,
REGISTRY_KEY NVARCHAR2(255) NOT NULL ,
REGISTRY_VALUE NVARCHAR2(255) NOT NULL ,
UPDATE_TIME DATE
);
-- ----------------------------
-- Primary Key structure for table XXL_JOB_REGISTRY
-- ----------------------------
ALTER TABLE XXL_JOB_REGISTRY ADD CONSTRAINT SYS_C0026594 PRIMARY KEY (ID);
-- ----------------------------
-- Indexes structure for table XXL_JOB_REGISTRY
-- ----------------------------
CREATE INDEX I_G_K_V
ON XXL_JOB_REGISTRY (REGISTRY_GROUP ASC, REGISTRY_VALUE ASC, REGISTRY_KEY ASC);
-- ----------------------------
-- Table structure for XXL_JOB_USER
-- ----------------------------
DROP TABLE XXL_JOB_USER;
CREATE TABLE XXL_JOB_USER (
ID NUMBER(20) NOT NULL ,
USERNAME NVARCHAR2(50) NOT NULL ,
PASSWORD NVARCHAR2(50) NOT NULL ,
ROLE NUMBER(4) NOT NULL ,
PERMISSION NVARCHAR2(255)
);
COMMENT ON COLUMN XXL_JOB_USER.USERNAME IS '账号';
COMMENT ON COLUMN XXL_JOB_USER.PASSWORD IS '密码';
COMMENT ON COLUMN XXL_JOB_USER.ROLE IS '角色0-普通用户、1-管理员';
COMMENT ON COLUMN XXL_JOB_USER.PERMISSION IS '权限执行器ID列表多个逗号分割';
-- ----------------------------
-- Records of XXL_JOB_USER
-- ----------------------------
INSERT INTO XXL_JOB_USER VALUES ('1', 'admin', 'e10adc3949ba59abbe56e057f20f883e', '1', NULL);
-- ----------------------------
-- Primary Key structure for table XXL_JOB_USER
-- ----------------------------
ALTER TABLE XXL_JOB_USER ADD CONSTRAINT SYS_C0026595 PRIMARY KEY (ID);
-- ----------------------------
-- Indexes structure for table XXL_JOB_USER
-- ----------------------------
CREATE UNIQUE INDEX I_USERNAME
ON XXL_JOB_USER (USERNAME ASC);
-- ----------------------------
-- Table structure for XXL_JOB_MACHINE
-- ----------------------------
DROP TABLE XXL_JOB_MACHINE;
CREATE TABLE XXL_JOB_MACHINE (
MACHINE_IP NVARCHAR2(22) NOT NULL ,
MACHINE_ID NUMBER(4) NOT NULL ,
ADD_TIME DATE NOT NULL ,
HEART_LAST_TIME DATE NOT NULL
);
COMMENT ON COLUMN XXL_JOB_MACHINE.MACHINE_IP IS '主机IP';
COMMENT ON COLUMN XXL_JOB_MACHINE.MACHINE_ID IS '主机IP对应的机器码';
COMMENT ON COLUMN XXL_JOB_MACHINE.ADD_TIME IS '创建时间';
COMMENT ON COLUMN XXL_JOB_MACHINE.HEART_LAST_TIME IS '最后一次心跳时间';
-- ----------------------------
-- Primary Key structure for table XXL_JOB_MACHINE
-- ----------------------------
ALTER TABLE XXL_JOB_MACHINE ADD CONSTRAINT SYS_C0026684 PRIMARY KEY (MACHINE_IP);
CREATE UNIQUE INDEX I_MACHINE_ID
ON XXL_JOB_MACHINE (MACHINE_ID ASC);
-- ----------------------------
-- Checks structure for table XXL_JOB_MACHINE
-- ----------------------------
ALTER TABLE XXL_JOB_MACHINE ADD CONSTRAINT SYS_C0026680 CHECK (MACHINE_IP IS NOT NULL) NOT DEFERRABLE INITIALLY IMMEDIATE NORELY VALIDATE;
ALTER TABLE XXL_JOB_MACHINE ADD CONSTRAINT SYS_C0026681 CHECK (MACHINE_ID IS NOT NULL) NOT DEFERRABLE INITIALLY IMMEDIATE NORELY VALIDATE;
ALTER TABLE XXL_JOB_MACHINE ADD CONSTRAINT SYS_C0026682 CHECK (ADD_TIME IS NOT NULL) NOT DEFERRABLE INITIALLY IMMEDIATE NORELY VALIDATE;
ALTER TABLE XXL_JOB_MACHINE ADD CONSTRAINT SYS_C0026683 CHECK (HEART_LAST_TIME IS NOT NULL) NOT DEFERRABLE INITIALLY IMMEDIATE NORELY VALIDATE;
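Note that none of the Oracle tables above declare an identity column or sequence for their ID columns; primary keys are plain NUMBER(20). The MySQL script earlier in this PR likewise drops AUTO_INCREMENT. Primary keys are instead generated on the admin side by the Snowflake-based GenerateId component introduced later in this PR, with XXL_JOB_MACHINE / xxl_job_machine recording the machine code assigned to each admin node.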

@@ -32,6 +32,7 @@
<mybatis-spring-boot-starter.version>2.1.2</mybatis-spring-boot-starter.version>
<mysql-connector-java.version>8.0.19</mysql-connector-java.version>
+<mybatis.pagehelper.version>1.2.12</mybatis.pagehelper.version>
<slf4j-api.version>1.7.30</slf4j-api.version>
<junit.version>4.13</junit.version>

@@ -66,6 +66,13 @@
<version>${mysql-connector-java.version}</version>
</dependency>
+<!-- oracle -->
+<dependency>
+<groupId>com.oracle</groupId>
+<artifactId>ojdbc7</artifactId>
+<version>12.1.0.2</version>
+</dependency>
<!-- xxl-job-core -->
<dependency>
<groupId>com.xuxueli</groupId>

@@ -1,22 +1,26 @@
package com.xxl.job.admin.controller;
+import java.util.List;
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.ResponseBody;
import com.xxl.job.admin.controller.annotation.PermissionLimit;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
+import com.xxl.job.admin.core.model.XxlJobInfo;
+import com.xxl.job.admin.service.XxlJobService;
import com.xxl.job.core.biz.AdminBiz;
import com.xxl.job.core.biz.model.HandleCallbackParam;
import com.xxl.job.core.biz.model.RegistryParam;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.util.GsonTool;
import com.xxl.job.core.util.XxlJobRemotingUtil;
-import org.springframework.stereotype.Controller;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.ResponseBody;
-import javax.annotation.Resource;
-import javax.servlet.http.HttpServletRequest;
-import java.util.List;
/**
* Created by xuxueli on 17/5/10.
@@ -28,6 +32,9 @@ public class JobApiController {
@Resource
private AdminBiz adminBiz;
+@Resource
+private XxlJobService xxlJobService;
/**
* api
*
@@ -63,6 +70,12 @@
} else if ("registryRemove".equals(uri)) {
RegistryParam registryParam = GsonTool.fromJson(data, RegistryParam.class);
return adminBiz.registryRemove(registryParam);
+} else if ("jobInfoAdd".equals(uri)) {
+XxlJobInfo jobInfo = GsonTool.fromJson(data, XxlJobInfo.class);
+return xxlJobService.add(jobInfo);
+} else if ("jobInfoRemove".equals(uri)) {
+XxlJobInfo jobInfo = GsonTool.fromJson(data, XxlJobInfo.class);
+return xxlJobService.remove(jobInfo.getId());
} else {
return new ReturnT<String>(ReturnT.FAIL_CODE, "invalid request, uri-mapping("+ uri +") not found.");
}

@@ -1,5 +1,7 @@
package com.xxl.job.admin.controller;
+import com.github.pagehelper.PageHelper;
+import com.xxl.job.admin.core.id.GenerateId;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLogGlue;
import com.xxl.job.admin.core.util.I18nUtil;
@@ -30,8 +32,11 @@ public class JobCodeController {
@Resource
private XxlJobLogGlueDao xxlJobLogGlueDao;
+@Resource
+private GenerateId generateId;
@RequestMapping
-public String index(HttpServletRequest request, Model model, int jobId) {
+public String index(HttpServletRequest request, Model model, long jobId) {
XxlJobInfo jobInfo = xxlJobInfoDao.loadById(jobId);
List<XxlJobLogGlue> jobLogGlues = xxlJobLogGlueDao.findByJobId(jobId);
@@ -55,7 +60,7 @@ public class JobCodeController {
@RequestMapping("/save")
@ResponseBody
-public ReturnT<String> save(Model model, int id, String glueSource, String glueRemark) {
+public ReturnT<String> save(Model model, long id, String glueSource, String glueRemark) {
// valid
if (glueRemark==null) {
return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_glue_remark")) );
@@ -85,10 +90,13 @@ public class JobCodeController {
xxlJobLogGlue.setAddTime(new Date());
xxlJobLogGlue.setUpdateTime(new Date());
+xxlJobLogGlue.setId(generateId.getId());
xxlJobLogGlueDao.save(xxlJobLogGlue);
// remove code backup more than 30
-xxlJobLogGlueDao.removeOld(exists_jobInfo.getId(), 30);
+PageHelper.startPage(1,30);
+List<Long> ids = xxlJobLogGlueDao.findIds(exists_jobInfo.getId());
+xxlJobLogGlueDao.removeOld(exists_jobInfo.getId(), ids);
return ReturnT.SUCCESS;
}
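This is the first of several controllers reworked to the same PageHelper pattern: offset/limit arguments disappear from the DAO methods, and PageHelper.startPage(page, size) is called immediately before the MyBatis query so the dialect-specific paging SQL (MySQL or Oracle) comes from the PageHelper plugin. A minimal sketch of the pattern, assuming the PageHelper interceptor is registered on the SqlSessionFactory; the mapper interface below is a hypothetical stand-in for the PR's DAOs, not part of the change:

    import com.github.pagehelper.PageHelper;
    import java.util.List;

    public class PageQueryExample {

        // Hypothetical MyBatis mapper; in the PR the equivalent methods live on the
        // reworked DAOs, e.g. XxlJobUserDao.pageList(username, role) with no offset/limit.
        interface UserMapper {
            List<String> pageList(String username, int role);
        }

        static List<String> firstPage(UserMapper mapper, String username, int role) {
            // Applies to the very next MyBatis select on this thread; with the PageHelper
            // interceptor configured, the returned List is a Page limited to 10 rows.
            PageHelper.startPage(1, 10);
            return mapper.pageList(username, role);
        }
    }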

@@ -1,5 +1,7 @@
package com.xxl.job.admin.controller;
+import com.github.pagehelper.PageHelper;
+import com.xxl.job.admin.core.id.GenerateId;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobRegistry;
import com.xxl.job.admin.core.util.I18nUtil;
@@ -8,6 +10,7 @@ import com.xxl.job.admin.dao.XxlJobInfoDao;
import com.xxl.job.admin.dao.XxlJobRegistryDao;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.enums.RegistryConfig;
+import com.xxl.job.core.util.DateUtil;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
@@ -32,6 +35,8 @@ public class JobGroupController {
public XxlJobGroupDao xxlJobGroupDao;
@Resource
private XxlJobRegistryDao xxlJobRegistryDao;
+@Resource
+private GenerateId generateId;
@RequestMapping
public String index(Model model) {
@@ -45,9 +50,10 @@ public class JobGroupController {
@RequestParam(required = false, defaultValue = "10") int length,
String appname, String title) {
+PageHelper.startPage(start/length+1,length);
// page query
-List<XxlJobGroup> list = xxlJobGroupDao.pageList(start, length, appname, title);
-int list_count = xxlJobGroupDao.pageListCount(start, length, appname, title);
+List<XxlJobGroup> list = xxlJobGroupDao.pageList(appname, title);
+int list_count = xxlJobGroupDao.pageListCount(appname, title);
// package result
Map<String, Object> maps = new HashMap<String, Object>();
@@ -82,7 +88,7 @@ public class JobGroupController {
}
}
}
+xxlJobGroup.setId(generateId.getId());
int ret = xxlJobGroupDao.save(xxlJobGroup);
return (ret>0)?ReturnT.SUCCESS:ReturnT.FAIL;
}
@@ -132,7 +138,8 @@ public class JobGroupController {
private List<String> findRegistryByAppName(String appnameParam){
HashMap<String, List<String>> appAddressMap = new HashMap<String, List<String>>();
-List<XxlJobRegistry> list = xxlJobRegistryDao.findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
+Date date = DateUtil.addSecond(new Date(), -RegistryConfig.DEAD_TIMEOUT);
+List<XxlJobRegistry> list = xxlJobRegistryDao.findAll(date);
if (list != null) {
for (XxlJobRegistry item: list) {
if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
@@ -154,10 +161,11 @@ public class JobGroupController {
@RequestMapping("/remove")
@ResponseBody
-public ReturnT<String> remove(int id){
+public ReturnT<String> remove(long id){
// valid
-int count = xxlJobInfoDao.pageListCount(0, 10, id, -1, null, null, null);
+PageHelper.startPage(1,10);
+int count = xxlJobInfoDao.pageListCount(id, -1, null, null, null);
if (count > 0) {
return new ReturnT<String>(500, I18nUtil.getString("jobgroup_del_limit_0") );
}
@@ -173,7 +181,7 @@ public class JobGroupController {
@RequestMapping("/loadById")
@ResponseBody
-public ReturnT<XxlJobGroup> loadById(int id){
+public ReturnT<XxlJobGroup> loadById(long id){
XxlJobGroup jobGroup = xxlJobGroupDao.load(id);
return jobGroup!=null?new ReturnT<XxlJobGroup>(jobGroup):new ReturnT<XxlJobGroup>(ReturnT.FAIL_CODE, null);
}

@@ -41,7 +41,7 @@ public class JobInfoController {
private XxlJobService xxlJobService;
@RequestMapping
-public String index(HttpServletRequest request, Model model, @RequestParam(required = false, defaultValue = "-1") int jobGroup) {
+public String index(HttpServletRequest request, Model model, @RequestParam(required = false, defaultValue = "-1") long jobGroup) {
// 枚举-字典
model.addAttribute("ExecutorRouteStrategyEnum", ExecutorRouteStrategyEnum.values()); // 路由策略-列表
@@ -83,7 +83,7 @@ public class JobInfoController {
}
return jobGroupList;
}
-public static void validPermission(HttpServletRequest request, int jobGroup) {
+public static void validPermission(HttpServletRequest request, long jobGroup) {
XxlJobUser loginUser = (XxlJobUser) request.getAttribute(LoginService.LOGIN_IDENTITY_KEY);
if (!loginUser.validPermission(jobGroup)) {
throw new RuntimeException(I18nUtil.getString("system_permission_limit") + "[username="+ loginUser.getUsername() +"]");
@@ -94,7 +94,7 @@ public class JobInfoController {
@ResponseBody
public Map<String, Object> pageList(@RequestParam(required = false, defaultValue = "0") int start,
@RequestParam(required = false, defaultValue = "10") int length,
-int jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author) {
+long jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author) {
return xxlJobService.pageList(start, length, jobGroup, triggerStatus, jobDesc, executorHandler, author);
}
@@ -113,26 +113,26 @@ public class JobInfoController {
@RequestMapping("/remove")
@ResponseBody
-public ReturnT<String> remove(int id) {
+public ReturnT<String> remove(long id) {
return xxlJobService.remove(id);
}
@RequestMapping("/stop")
@ResponseBody
-public ReturnT<String> pause(int id) {
+public ReturnT<String> pause(long id) {
return xxlJobService.stop(id);
}
@RequestMapping("/start")
@ResponseBody
-public ReturnT<String> start(int id) {
+public ReturnT<String> start(long id) {
return xxlJobService.start(id);
}
@RequestMapping("/trigger")
@ResponseBody
//@PermissionLimit(limit = false)
-public ReturnT<String> triggerJob(int id, String executorParam, String addressList) {
+public ReturnT<String> triggerJob(long id, String executorParam, String addressList) {
// force cover job param
if (executorParam == null) {
executorParam = "";

@@ -1,5 +1,6 @@
package com.xxl.job.admin.controller;
+import com.github.pagehelper.PageHelper;
import com.xxl.job.admin.core.exception.XxlJobException;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobInfo;
@@ -47,7 +48,7 @@ public class JobLogController {
public XxlJobLogDao xxlJobLogDao;
@RequestMapping
-public String index(HttpServletRequest request, Model model, @RequestParam(required = false, defaultValue = "0") Integer jobId) {
+public String index(HttpServletRequest request, Model model, @RequestParam(required = false, defaultValue = "0") Long jobId) {
// 执行器列表
List<XxlJobGroup> jobGroupList_all = xxlJobGroupDao.findAll();
@@ -78,7 +79,7 @@ public class JobLogController {
@RequestMapping("/getJobsByGroup")
@ResponseBody
-public ReturnT<List<XxlJobInfo>> getJobsByGroup(int jobGroup){
+public ReturnT<List<XxlJobInfo>> getJobsByGroup(long jobGroup){
List<XxlJobInfo> list = xxlJobInfoDao.getJobsByGroup(jobGroup);
return new ReturnT<List<XxlJobInfo>>(list);
}
@@ -88,7 +89,7 @@ public class JobLogController {
public Map<String, Object> pageList(HttpServletRequest request,
@RequestParam(required = false, defaultValue = "0") int start,
@RequestParam(required = false, defaultValue = "10") int length,
-int jobGroup, int jobId, int logStatus, String filterTime) {
+long jobGroup, long jobId, int logStatus, String filterTime) {
// valid permission
JobInfoController.validPermission(request, jobGroup); // 仅管理员支持查询全部;普通用户仅支持查询有权限的 jobGroup
@@ -105,8 +106,9 @@ public class JobLogController {
}
// page query
-List<XxlJobLog> list = xxlJobLogDao.pageList(start, length, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
-int list_count = xxlJobLogDao.pageListCount(start, length, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
+PageHelper.startPage(start/length+1,length);
+List<XxlJobLog> list = xxlJobLogDao.pageList(jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
+int list_count = xxlJobLogDao.pageListCount(jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
// package result
Map<String, Object> maps = new HashMap<String, Object>();
@@ -117,7 +119,7 @@ public class JobLogController {
}
@RequestMapping("/logDetailPage")
-public String logDetailPage(int id, Model model){
+public String logDetailPage(long id, Model model){
// base check
ReturnT<String> logStatue = ReturnT.SUCCESS;
@@ -158,7 +160,7 @@ public class JobLogController {
@RequestMapping("/logKill")
@ResponseBody
-public ReturnT<String> logKill(int id){
+public ReturnT<String> logKill(long id){
// base check
XxlJobLog log = xxlJobLogDao.load(id);
XxlJobInfo jobInfo = xxlJobInfoDao.loadById(log.getJobId());
@@ -192,7 +194,7 @@ public class JobLogController {
@RequestMapping("/clearLog")
@ResponseBody
-public ReturnT<String> clearLog(int jobGroup, int jobId, int type){
+public ReturnT<String> clearLog(long jobGroup, long jobId, int type){
Date clearBeforeTime = null;
int clearBeforeNum = 0;
@@ -214,13 +216,20 @@ public class JobLogController {
clearBeforeNum = 100000; // 清理十万条以前日志数据
} else if (type == 9) {
clearBeforeNum = 0; // 清理所有日志数据
} else {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_clean_type_unvalid"));
}
List<Long> logIds = null;
+List<Long> recentLogIds = null;
do {
-logIds = xxlJobLogDao.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
+if(clearBeforeNum > 0){
+PageHelper.startPage(1,clearBeforeNum);
+recentLogIds = xxlJobLogDao.findRecentLogs(jobGroup,jobId);
+}
+PageHelper.startPage(1,1000);
+logIds = xxlJobLogDao.findClearLogIds(jobGroup, jobId, clearBeforeTime,recentLogIds);
if (logIds!=null && logIds.size()>0) {
xxlJobLogDao.clearLog(logIds);
}

@@ -1,10 +1,14 @@
package com.xxl.job.admin.controller;
+import com.github.pagehelper.PageHelper;
import com.xxl.job.admin.controller.annotation.PermissionLimit;
+import com.xxl.job.admin.core.id.GenerateId;
import com.xxl.job.admin.core.model.XxlJobGroup;
+import com.xxl.job.admin.core.model.XxlJobLogGlue;
import com.xxl.job.admin.core.model.XxlJobUser;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.dao.XxlJobGroupDao;
+import com.xxl.job.admin.dao.XxlJobLogGlueDao;
import com.xxl.job.admin.dao.XxlJobUserDao;
import com.xxl.job.admin.service.LoginService;
import com.xxl.job.core.biz.model.ReturnT;
@@ -18,6 +22,7 @@ import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
+import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -33,6 +38,10 @@ public class UserController {
private XxlJobUserDao xxlJobUserDao;
@Resource
private XxlJobGroupDao xxlJobGroupDao;
+@Resource
+private GenerateId generateId;
+@Resource
+private XxlJobLogGlueDao xxlJobLogGlueDao;
@RequestMapping
@PermissionLimit(adminuser = true)
@@ -52,9 +61,11 @@ public class UserController {
@RequestParam(required = false, defaultValue = "10") int length,
String username, int role) {
+PageHelper.startPage(start/length+1,length);
// page list
-List<XxlJobUser> list = xxlJobUserDao.pageList(start, length, username, role);
-int list_count = xxlJobUserDao.pageListCount(start, length, username, role);
+List<XxlJobUser> list = xxlJobUserDao.pageList(username, role);
+int list_count = xxlJobUserDao.pageListCount(username, role);
// package result
Map<String, Object> maps = new HashMap<String, Object>();
@@ -94,6 +105,8 @@ public class UserController {
return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("user_username_repeat") );
}
+xxlJobUser.setId(generateId.getId());
// write
xxlJobUserDao.save(xxlJobUser);
return ReturnT.SUCCESS;
@@ -130,7 +143,7 @@ public class UserController {
@RequestMapping("/remove")
@ResponseBody
@PermissionLimit(adminuser = true)
-public ReturnT<String> remove(HttpServletRequest request, int id) {
+public ReturnT<String> remove(HttpServletRequest request, long id) {
// avoid opt login seft
XxlJobUser loginUser = (XxlJobUser) request.getAttribute(LoginService.LOGIN_IDENTITY_KEY);

@@ -48,7 +48,7 @@ public class EmailJobAlarm implements JobAlarm {
}
// email info
-XxlJobGroup group = XxlJobAdminConfig.getAdminConfig().getXxlJobGroupDao().load(Integer.valueOf(info.getJobGroup()));
+XxlJobGroup group = XxlJobAdminConfig.getAdminConfig().getXxlJobGroupDao().load(info.getJobGroup());
String personal = I18nUtil.getString("admin_name_full");
String title = I18nUtil.getString("jobconf_monitor");
String content = MessageFormat.format(loadEmailJobAlarmTemplate(),

@@ -0,0 +1,27 @@
package com.xxl.job.admin.core.conf;
import java.math.BigDecimal;
import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
/**
* Jackson customization: serialize Long and BigDecimal values as JSON strings so that
* large IDs (snowflake IDs) do not lose precision in JavaScript.
*
* @author
*
*/
@Configuration
public class JacksonConfigurer {
@Bean
public Jackson2ObjectMapperBuilderCustomizer jackson2ObjectMapperBuilderCustomizer() {
return builder -> {
builder.serializerByType(BigDecimal.class, ToStringSerializer.instance);
builder.serializerByType(Long.TYPE, ToStringSerializer.instance);
};
}
}
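The customizer above registers ToStringSerializer for BigDecimal and for the primitive long type because snowflake IDs exceed JavaScript's exactly-representable integer range (2^53 - 1) and would otherwise be rounded in the browser. A standalone sketch of the effect, assuming plain Jackson outside Spring Boot; the class name and ID value are illustrative, not part of the PR:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.module.SimpleModule;
    import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;

    public class LongAsStringDemo {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            SimpleModule module = new SimpleModule();
            module.addSerializer(Long.class, ToStringSerializer.instance);  // boxed Long
            module.addSerializer(Long.TYPE, ToStringSerializer.instance);   // primitive long fields
            mapper.registerModule(module);

            long id = 6834918141051617280L;  // snowflake-sized value, larger than 2^53
            System.out.println(mapper.writeValueAsString(id));  // prints "6834918141051617280" (quoted)
        }
    }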

@@ -1,6 +1,7 @@
package com.xxl.job.admin.core.conf;
import com.xxl.job.admin.core.alarm.JobAlarmer;
+import com.xxl.job.admin.core.id.GenerateId;
import com.xxl.job.admin.core.scheduler.XxlJobScheduler;
import com.xxl.job.admin.dao.*;
import org.springframework.beans.factory.DisposableBean;
@@ -85,6 +86,8 @@ public class XxlJobAdminConfig implements InitializingBean, DisposableBean {
private DataSource dataSource;
@Resource
private JobAlarmer jobAlarmer;
+@Resource
+private GenerateId generateId;
public String getI18n() {
@@ -155,4 +158,7 @@ public class XxlJobAdminConfig implements InitializingBean, DisposableBean {
return jobAlarmer;
}
+public GenerateId getGenerateId() {
+return generateId;
+}
}

@@ -0,0 +1,45 @@
package com.xxl.job.admin.core.id;
import com.xxl.job.admin.core.id.service.MachineService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
@Component
public class GenerateId {
private final Logger logger = LoggerFactory.getLogger(GenerateId.class);
private SnowflakeIdWorker idWorker = null;
@Autowired
private MachineService machineService;
private Integer machineId = -1;
public Long getId() {
return idWorker.nextId();
}
@PostConstruct
private void getIdBefore() {
//只需要第一次调用 对idworker进行初始化
machineId = machineService.getInitMachineId();
idWorker = new SnowflakeIdWorker(machineId);
}
public Integer getMachineId(){
return this.machineId;
}
public void setMachineId(Integer machineId){
this.machineId = machineId;
}
public void setIdWorker(SnowflakeIdWorker snowflakeIdWorker){
this.idWorker = snowflakeIdWorker;
}
}

@@ -0,0 +1,33 @@
package com.xxl.job.admin.core.id;
import java.util.Date;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import com.xxl.job.admin.core.id.service.MachineService;
import com.xxl.job.admin.core.model.XxlJobMachine;
import com.xxl.job.admin.core.util.MachineUtils;
@Component
@EnableScheduling
public class HeartBeat {
@Value("${server.port}")
private String serverPort;
@Autowired
private MachineService machineService;
@Scheduled(fixedDelay = 10000)
public void checkMachineSurvive() {
String machineIpStr = MachineUtils.getIPAndPort(serverPort);
XxlJobMachine xxlJobMachine = machineService.selectByMachineIp(machineIpStr);
if (xxlJobMachine != null) {
machineService.update(machineIpStr, new Date());
}
}
}

@@ -0,0 +1,95 @@
package com.xxl.job.admin.core.id;
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/** 开始时间截 (2019-01-01) */
private final long twepoch = 1546272000000L;
/** 机器id所占的位数 */
private final long workerIdBits = 5L;
/** 数据标识id所占的位数 */
private final long datacenterIdBits = 5L;
private final long maxMachineId = 1023;
/** 序列在id中占的位数 */
private final long sequenceBits = 12L;
/** 机器ID向左移12位 */
private final long getMachineIdShift = sequenceBits;
/** 时间截向左移22位(5+5+12) */
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/** 生成序列的掩码这里为4095 (0b111111111111=0xfff=4095) */
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/** 机器Id */
private long machineId;
/** 毫秒内序列(0~4095) */
private long sequence = 0L;
/** 上次生成ID的时间截 */
private long lastTimestamp = -1L;
// ==============================Constructors=====================================
/**
* Constructor.
*
* @param machineId machine ID (0 ~ maxMachineId)
*/
public SnowflakeIdWorker(long machineId) {
if (machineId > maxMachineId || machineId < 0) {
throw new IllegalArgumentException(
String.format("worker machineId can't be greater than %d or less than 0", maxMachineId));
}
this.machineId = machineId;
}
// ==============================Methods==========================================
/**
* Get the next ID (thread-safe).
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
// 如果是同一时间生成的,则进行毫秒内序列
if (timestamp <= lastTimestamp) {
timestamp = lastTimestamp;
sequence = (sequence + 1) & sequenceMask;
// 毫秒内序列溢出
if (sequence == 0) {
// 阻塞到下一个毫秒,获得新的时间戳
timestamp++;
}
}
// 时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
// 上次生成ID的时间截
lastTimestamp = timestamp;
// 移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (machineId << getMachineIdShift) //
| sequence;
}
/**
* Return the current time in milliseconds.
*
* @return current time (ms)
*/
protected long timeGen() {
return System.currentTimeMillis();
}
}
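For reference, a minimal usage sketch of the generator added above; the machine id is hard-coded here, whereas in the admin application GenerateId obtains it from the xxl_job_machine table via MachineService:

    import com.xxl.job.admin.core.id.SnowflakeIdWorker;

    public class SnowflakeIdDemo {
        public static void main(String[] args) {
            // machineId must be unique per running admin node (0..1023 with the bit layout above)
            SnowflakeIdWorker worker = new SnowflakeIdWorker(1);
            for (int i = 0; i < 3; i++) {
                // 64-bit, time-ordered IDs: [timestamp | machineId | per-millisecond sequence]
                System.out.println(worker.nextId());
            }
        }
    }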

@@ -0,0 +1,90 @@
package com.xxl.job.admin.core.id.impl;
import com.xxl.job.admin.core.id.service.MachineService;
import com.xxl.job.admin.core.model.XxlJobMachine;
import com.xxl.job.admin.core.util.MachineUtils;
import com.xxl.job.admin.dao.XxlJobMachineDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.Random;
@Service
public class MachineServiceImpl implements MachineService {
private final Logger logger = LoggerFactory.getLogger(MachineServiceImpl.class);
@Value("${server.port}")
private String serverPort;
@Autowired
private XxlJobMachineDao xxlJobMachineDao;
@Override
public void save(XxlJobMachine xxlJobMachine) {
xxlJobMachineDao.save(xxlJobMachine);
}
@Override
public void update(String machineIp, Date heartLastTime) {
xxlJobMachineDao.update(machineIp,heartLastTime);
}
@Override
public XxlJobMachine selectByMachineIp(String machineIp) {
return xxlJobMachineDao.selectByHostIp(machineIp);
}
@Override
public Integer selectMaxMachineId() {
return xxlJobMachineDao.selectMaxMachineId();
}
@Override
public Integer getInitMachineId() {
String ipStr = MachineUtils.getIPAndPort(serverPort);
XxlJobMachine xxlJobMachine = selectByMachineIp(ipStr);
Date nowDate = new Date();
int machineId = -1;
if(xxlJobMachine != null){
update(ipStr,nowDate);
machineId = xxlJobMachine.getMachineId();
}else{
xxlJobMachine = new XxlJobMachine();
xxlJobMachine.setMachineIp(ipStr);
xxlJobMachine.setAddTime(nowDate);
xxlJobMachine.setHeartLastTime(nowDate);
Random random = new Random();
boolean flag = false;
for(int i = 0; i < 100; i++){
try {
Integer value = selectMaxMachineId();
machineId = value == null ? 1 : value+1;
xxlJobMachine.setMachineId(machineId);
save(xxlJobMachine);
flag = true;
break;
} catch (DuplicateKeyException e) {
try {
Thread.sleep(random.nextInt(2000)+1);
} catch (InterruptedException interruptedException) {
logger.error("sleep error,cause",interruptedException);
}
logger.error("retry >>>>>>>>>>>>> ");
} catch (Exception e){
logger.error("save error >>>>>>,system exit,cause",e);
}
}
if(!flag) {
logger.error("多次获取machineId失败退出程序");
System.exit(0);
}
}
return machineId;
}
}
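The machine-code allocation above relies on the unique index on machine_id (index_machine_id / I_MACHINE_ID in the new schema): each admin node reads the current maximum, tries to insert max+1, and on a DuplicateKeyException sleeps for a random interval and retries, giving up after 100 attempts. Combined with the 10-second heartbeat in HeartBeat, this keeps concurrently started admin instances on distinct snowflake machine ids.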

@@ -0,0 +1,18 @@
package com.xxl.job.admin.core.id.service;
import com.xxl.job.admin.core.model.XxlJobMachine;
import java.util.Date;
public interface MachineService {
void save(XxlJobMachine xxlJobMachine);
void update(String machineIp, Date heartLastTime);
XxlJobMachine selectByMachineIp(String machineIp);
Integer selectMaxMachineId();
Integer getInitMachineId();
}

@@ -9,7 +9,7 @@ import java.util.List;
*/
public class XxlJobGroup {
-private int id;
+private long id;
private String appname;
private String title;
private int addressType; // 执行器地址类型0=自动注册、1=手动录入
@@ -24,11 +24,11 @@ public class XxlJobGroup {
return registryList;
}
-public int getId() {
+public long getId() {
return id;
}
-public void setId(int id) {
+public void setId(long id) {
this.id = id;
}

@@ -9,9 +9,9 @@ import java.util.Date;
*/
public class XxlJobInfo {
-private int id; // 主键ID
-private int jobGroup; // 执行器主键ID
+private long id; // 主键ID
+private long jobGroup; // 执行器主键ID
private String jobCron; // 任务执行CRON表达式
private String jobDesc;
@@ -40,19 +40,19 @@ public class XxlJobInfo {
private long triggerNextTime; // 下次调度时间
-public int getId() {
+public long getId() {
return id;
}
-public void setId(int id) {
+public void setId(long id) {
this.id = id;
}
-public int getJobGroup() {
+public long getJobGroup() {
return jobGroup;
}
-public void setJobGroup(int jobGroup) {
+public void setJobGroup(long jobGroup) {
this.jobGroup = jobGroup;
}

@@ -11,8 +11,8 @@ public class XxlJobLog {
private long id;
// job info
-private int jobGroup;
-private int jobId;
+private long jobGroup;
+private long jobId;
// execute info
private String executorAddress;
@@ -42,19 +42,19 @@ public class XxlJobLog {
this.id = id;
}
-public int getJobGroup() {
+public long getJobGroup() {
return jobGroup;
}
-public void setJobGroup(int jobGroup) {
+public void setJobGroup(long jobGroup) {
this.jobGroup = jobGroup;
}
-public int getJobId() {
+public long getJobId() {
return jobId;
}
-public void setJobId(int jobId) {
+public void setJobId(long jobId) {
this.jobId = jobId;
}

@@ -8,27 +8,27 @@ import java.util.Date;
*/
public class XxlJobLogGlue {
-private int id;
-private int jobId; // 任务主键ID
+private long id;
+private long jobId; // 任务主键ID
private String glueType; // GLUE类型 #com.xxl.job.core.glue.GlueTypeEnum
private String glueSource;
private String glueRemark;
private Date addTime;
private Date updateTime;
-public int getId() {
+public long getId() {
return id;
}
-public void setId(int id) {
+public void setId(long id) {
this.id = id;
}
-public int getJobId() {
+public long getJobId() {
return jobId;
}
-public void setJobId(int jobId) {
+public void setJobId(long jobId) {
this.jobId = jobId;
}

@@ -4,7 +4,7 @@ import java.util.Date;
public class XxlJobLogReport {
-private int id;
+private long id;
private Date triggerDay;
@@ -12,11 +12,11 @@ public class XxlJobLogReport {
private int sucCount;
private int failCount;
-public int getId() {
+public long getId() {
return id;
}
-public void setId(int id) {
+public void setId(long id) {
this.id = id;
}

@@ -0,0 +1,58 @@
package com.xxl.job.admin.core.model;
import java.util.Date;
public class XxlJobMachine {
/**
* host IP
*/
private String machineIp;
/**
* machine code mapped to this host IP
*/
private Integer machineId;
/**
* create time
*/
private Date addTime;
/**
* last heartbeat time
*/
private Date heartLastTime;
public Date getHeartLastTime() {
return heartLastTime;
}
public void setHeartLastTime(Date heartLastTime) {
this.heartLastTime = heartLastTime;
}
public Integer getMachineId() {
return machineId;
}
public void setMachineId(Integer machineId) {
this.machineId = machineId;
}
public Date getAddTime() {
return addTime;
}
public void setAddTime(Date addTime) {
this.addTime = addTime;
}
public String getMachineIp() {
return machineIp;
}
public void setMachineIp(String machineIp) {
this.machineIp = machineIp;
}
}

@@ -7,17 +7,17 @@ import java.util.Date;
*/
public class XxlJobRegistry {
-private int id;
+private long id;
private String registryGroup;
private String registryKey;
private String registryValue;
private Date updateTime;
-public int getId() {
+public long getId() {
return id;
}
-public void setId(int id) {
+public void setId(long id) {
this.id = id;
}

@@ -7,17 +7,17 @@ import org.springframework.util.StringUtils;
*/
public class XxlJobUser {
-private int id;
+private long id;
private String username; // 账号
private String password; // 密码
private int role; // 角色0-普通用户、1-管理员
private String permission; // 权限执行器ID列表多个逗号分割
-public int getId() {
+public long getId() {
return id;
}
-public void setId(int id) {
+public void setId(long id) {
this.id = id;
}
@@ -54,7 +54,7 @@ public class XxlJobUser {
}
// plugin
-public boolean validPermission(int jobGroup){
+public boolean validPermission(long jobGroup){
if (this.role == 1) {
return true;
} else {

@@ -56,7 +56,7 @@ public class ExecutorRouteConsistentHash extends ExecutorRouter {
return truncateHashCode;
}
-public String hashJob(int jobId, List<String> addressList) {
+public String hashJob(long jobId, List<String> addressList) {
// ------A1------A2-------A3------
// -----------J1------------------

@@ -17,10 +17,10 @@ import java.util.concurrent.ConcurrentMap;
*/
public class ExecutorRouteLFU extends ExecutorRouter {
-private static ConcurrentMap<Integer, HashMap<String, Integer>> jobLfuMap = new ConcurrentHashMap<Integer, HashMap<String, Integer>>();
+private static ConcurrentMap<Long, HashMap<String, Integer>> jobLfuMap = new ConcurrentHashMap<>();
private static long CACHE_VALID_TIME = 0;
-public String route(int jobId, List<String> addressList) {
+public String route(long jobId, List<String> addressList) {
// cache clear
if (System.currentTimeMillis() > CACHE_VALID_TIME) {

@@ -19,10 +19,10 @@ import java.util.concurrent.ConcurrentMap;
*/
public class ExecutorRouteLRU extends ExecutorRouter {
-private static ConcurrentMap<Integer, LinkedHashMap<String, String>> jobLRUMap = new ConcurrentHashMap<Integer, LinkedHashMap<String, String>>();
+private static ConcurrentMap<Long, LinkedHashMap<String, String>> jobLRUMap = new ConcurrentHashMap<>();
private static long CACHE_VALID_TIME = 0;
-public String route(int jobId, List<String> addressList) {
+public String route(long jobId, List<String> addressList) {
// cache clear
if (System.currentTimeMillis() > CACHE_VALID_TIME) {

@ -14,9 +14,9 @@ import java.util.concurrent.ConcurrentMap;
*/ */
public class ExecutorRouteRound extends ExecutorRouter { public class ExecutorRouteRound extends ExecutorRouter {
private static ConcurrentMap<Integer, Integer> routeCountEachJob = new ConcurrentHashMap<Integer, Integer>(); private static ConcurrentMap<Long, Integer> routeCountEachJob = new ConcurrentHashMap<>();
private static long CACHE_VALID_TIME = 0; private static long CACHE_VALID_TIME = 0;
private static int count(int jobId) { private static int count(long jobId) {
// cache clear // cache clear
if (System.currentTimeMillis() > CACHE_VALID_TIME) { if (System.currentTimeMillis() > CACHE_VALID_TIME) {
routeCountEachJob.clear(); routeCountEachJob.clear();

@ -1,5 +1,6 @@
package com.xxl.job.admin.core.thread; package com.xxl.job.admin.core.thread;
import com.github.pagehelper.PageHelper;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig; import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobInfo; import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog; import com.xxl.job.admin.core.model.XxlJobLog;
@ -38,7 +39,8 @@ public class JobFailMonitorHelper {
while (!toStop) { while (!toStop) {
try { try {
List<Long> failLogIds = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().findFailJobLogIds(1000); PageHelper.startPage(1,1000);
List<Long> failLogIds = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().findFailJobLogIds();
if (failLogIds!=null && !failLogIds.isEmpty()) { if (failLogIds!=null && !failLogIds.isEmpty()) {
for (long failLogId: failLogIds) { for (long failLogId: failLogIds) {

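The change above drops the SQL-side LIMIT from findFailJobLogIds and bounds the batch with PageHelper instead: PageHelper.startPage(pageNum, pageSize) stores the page request in a ThreadLocal, and the very next MyBatis query on that thread is rewritten with the paging clause of the active dialect (LIMIT for MySQL, ROWNUM/OFFSET-FETCH for Oracle, chosen via the pagehelper.auto-runtime-dialect=true setting added later in this PR). A minimal sketch of the pattern, using the real PageHelper API but a hypothetical stand-in for the DAO:

import com.github.pagehelper.PageHelper;
import java.util.List;

public class PageHelperSketch {

    // Hypothetical stand-in for XxlJobLogDao.findFailJobLogIds().
    interface FailLogDao {
        List<Long> findFailJobLogIds();
    }

    // Fetch at most 1000 failed-log ids, mirroring the monitor loop above.
    static List<Long> firstBatch(FailLogDao dao) {
        // Registers "page 1, 1000 rows" in a ThreadLocal; it applies only to the
        // next query executed on this thread and is cleared afterwards.
        PageHelper.startPage(1, 1000);
        return dao.findFailJobLogIds();
    }
}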
@ -1,5 +1,6 @@
package com.xxl.job.admin.core.thread; package com.xxl.job.admin.core.thread;
import com.github.pagehelper.PageHelper;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig; import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobLogReport; import com.xxl.job.admin.core.model.XxlJobLogReport;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -24,7 +25,6 @@ public class JobLogReportHelper {
return instance; return instance;
} }
private Thread logrThread; private Thread logrThread;
private volatile boolean toStop = false; private volatile boolean toStop = false;
public void start(){ public void start(){
@ -83,6 +83,7 @@ public class JobLogReportHelper {
// do refresh // do refresh
int ret = XxlJobAdminConfig.getAdminConfig().getXxlJobLogReportDao().update(xxlJobLogReport); int ret = XxlJobAdminConfig.getAdminConfig().getXxlJobLogReportDao().update(xxlJobLogReport);
if (ret < 1) { if (ret < 1) {
xxlJobLogReport.setId(XxlJobAdminConfig.getAdminConfig().getGenerateId().getId());
XxlJobAdminConfig.getAdminConfig().getXxlJobLogReportDao().save(xxlJobLogReport); XxlJobAdminConfig.getAdminConfig().getXxlJobLogReportDao().save(xxlJobLogReport);
} }
} }
@ -109,7 +110,8 @@ public class JobLogReportHelper {
// clean expired log // clean expired log
List<Long> logIds = null; List<Long> logIds = null;
do { do {
logIds = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().findClearLogIds(0, 0, clearBeforeTime, 0, 1000); PageHelper.startPage(1,1000);
logIds = XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().findClearLogIds(0, 0, clearBeforeTime, null);
if (logIds!=null && logIds.size()>0) { if (logIds!=null && logIds.size()>0) {
XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().clearLog(logIds); XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().clearLog(logIds);
} }

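Because the primary keys are no longer AUTO_INCREMENT (see the schema changes at the top of this PR), the report refresh has to assign an id itself before falling back from update to insert. A minimal sketch of that update-then-insert pattern, with hypothetical stand-ins for the DAO and for the GenerateId bean whose implementation is not part of this diff:

public class ReportUpsertSketch {

    // Hypothetical stand-ins; getId() is assumed to hand out a unique long,
    // matching how generateId.getId() is used elsewhere in this PR.
    interface ReportDao { int update(Report r); void save(Report r); }
    interface IdGenerator { long getId(); }
    static class Report { private long id; void setId(long id) { this.id = id; } }

    // Try the UPDATE first; if no row matched, set an application-generated id and INSERT,
    // since the database no longer fills the id column on its own.
    static void saveOrUpdate(ReportDao dao, IdGenerator ids, Report report) {
        if (dao.update(report) < 1) {
            report.setId(ids.getId());
            dao.save(report);
        }
    }
}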
@ -4,6 +4,7 @@ import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.model.XxlJobGroup; import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobRegistry; import com.xxl.job.admin.core.model.XxlJobRegistry;
import com.xxl.job.core.enums.RegistryConfig; import com.xxl.job.core.enums.RegistryConfig;
import com.xxl.job.core.util.DateUtil;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -35,14 +36,16 @@ public class JobRegistryMonitorHelper {
if (groupList!=null && !groupList.isEmpty()) { if (groupList!=null && !groupList.isEmpty()) {
// remove dead address (admin/executor) // remove dead address (admin/executor)
List<Integer> ids = XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().findDead(RegistryConfig.DEAD_TIMEOUT, new Date()); Date date = DateUtil.addSecond(new Date(), -RegistryConfig.DEAD_TIMEOUT);
List<Long> ids = XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().findDead(date);
if (ids!=null && ids.size()>0) { if (ids!=null && ids.size()>0) {
XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().removeDead(ids); XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().removeDead(ids);
} }
// fresh online address (admin/executor) // fresh online address (admin/executor)
HashMap<String, List<String>> appAddressMap = new HashMap<String, List<String>>(); HashMap<String, List<String>> appAddressMap = new HashMap<String, List<String>>();
List<XxlJobRegistry> list = XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().findAll(RegistryConfig.DEAD_TIMEOUT, new Date()); Date time = DateUtil.addSecond(new Date(), -RegistryConfig.DEAD_TIMEOUT);
List<XxlJobRegistry> list = XxlJobAdminConfig.getAdminConfig().getXxlJobRegistryDao().findAll(time);
if (list != null) { if (list != null) {
for (XxlJobRegistry item: list) { for (XxlJobRegistry item: list) {
if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) { if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
@ -72,7 +75,7 @@ public class JobRegistryMonitorHelper {
} }
addressListStr = addressListStr.substring(0, addressListStr.length()-1); addressListStr = addressListStr.substring(0, addressListStr.length()-1);
} }
group.setAddressList(addressListStr); group.setAddressList(addressListStr == null ? "" : addressListStr);
XxlJobAdminConfig.getAdminConfig().getXxlJobGroupDao().update(group); XxlJobAdminConfig.getAdminConfig().getXxlJobGroupDao().update(group);
} }
} }

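findDead and findAll no longer receive a timeout plus the current time; the caller now computes the cutoff (now minus DEAD_TIMEOUT seconds) in Java and passes a single Date, which keeps the mapper SQL free of MySQL-specific date arithmetic and therefore portable to Oracle. A minimal sketch of the cutoff computation, assuming only that RegistryConfig.DEAD_TIMEOUT is a number of seconds, as its use here suggests:

import java.util.Calendar;
import java.util.Date;

public class DeadCutoffSketch {

    static final int DEAD_TIMEOUT_SECONDS = 90; // placeholder for RegistryConfig.DEAD_TIMEOUT

    // Equivalent of DateUtil.addSecond(new Date(), -DEAD_TIMEOUT): any registry row whose
    // update_time is older than this cutoff is treated as dead and removed.
    static Date deadCutoff() {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.SECOND, -DEAD_TIMEOUT_SECONDS);
        return cal.getTime();
    }
}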
@ -1,5 +1,6 @@
package com.xxl.job.admin.core.thread; package com.xxl.job.admin.core.thread;
import com.github.pagehelper.PageHelper;
import com.xxl.job.admin.core.conf.XxlJobAdminConfig; import com.xxl.job.admin.core.conf.XxlJobAdminConfig;
import com.xxl.job.admin.core.cron.CronExpression; import com.xxl.job.admin.core.cron.CronExpression;
import com.xxl.job.admin.core.model.XxlJobInfo; import com.xxl.job.admin.core.model.XxlJobInfo;
@ -32,7 +33,7 @@ public class JobScheduleHelper {
private Thread ringThread; private Thread ringThread;
private volatile boolean scheduleThreadToStop = false; private volatile boolean scheduleThreadToStop = false;
private volatile boolean ringThreadToStop = false; private volatile boolean ringThreadToStop = false;
private volatile static Map<Integer, List<Integer>> ringData = new ConcurrentHashMap<>(); private volatile static Map<Integer, List<Long>> ringData = new ConcurrentHashMap<>();
public void start(){ public void start(){
@ -76,7 +77,8 @@ public class JobScheduleHelper {
// 1、pre read // 1、pre read
long nowTime = System.currentTimeMillis(); long nowTime = System.currentTimeMillis();
List<XxlJobInfo> scheduleList = XxlJobAdminConfig.getAdminConfig().getXxlJobInfoDao().scheduleJobQuery(nowTime + PRE_READ_MS, preReadCount); PageHelper.startPage(1,preReadCount);
List<XxlJobInfo> scheduleList = XxlJobAdminConfig.getAdminConfig().getXxlJobInfoDao().scheduleJobQuery(nowTime + PRE_READ_MS);
if (scheduleList!=null && scheduleList.size()>0) { if (scheduleList!=null && scheduleList.size()>0) {
// 2、push time-ring // 2、push time-ring
for (XxlJobInfo jobInfo: scheduleList) { for (XxlJobInfo jobInfo: scheduleList) {
@ -226,10 +228,10 @@ public class JobScheduleHelper {
try { try {
// second data // second data
List<Integer> ringItemData = new ArrayList<>(); List<Long> ringItemData = new ArrayList<>();
int nowSecond = Calendar.getInstance().get(Calendar.SECOND); // also check the previous tick, in case processing ran long and skipped it; int nowSecond = Calendar.getInstance().get(Calendar.SECOND); // also check the previous tick, in case processing ran long and skipped it;
for (int i = 0; i < 2; i++) { for (int i = 0; i < 2; i++) {
List<Integer> tmpData = ringData.remove( (nowSecond+60-i)%60 ); List<Long> tmpData = ringData.remove( (nowSecond+60-i)%60 );
if (tmpData != null) { if (tmpData != null) {
ringItemData.addAll(tmpData); ringItemData.addAll(tmpData);
} }
@ -239,7 +241,7 @@ public class JobScheduleHelper {
logger.debug(">>>>>>>>>>> xxl-job, time-ring beat : " + nowSecond + " = " + Arrays.asList(ringItemData) ); logger.debug(">>>>>>>>>>> xxl-job, time-ring beat : " + nowSecond + " = " + Arrays.asList(ringItemData) );
if (ringItemData.size() > 0) { if (ringItemData.size() > 0) {
// do trigger // do trigger
for (int jobId: ringItemData) { for (long jobId: ringItemData) {
// do trigger // do trigger
JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null, null); JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null, null);
} }
@ -281,11 +283,11 @@ public class JobScheduleHelper {
} }
} }
private void pushTimeRing(int ringSecond, int jobId){ private void pushTimeRing(int ringSecond, long jobId){
// push async ring // push async ring
List<Integer> ringItemData = ringData.get(ringSecond); List<Long> ringItemData = ringData.get(ringSecond);
if (ringItemData == null) { if (ringItemData == null) {
ringItemData = new ArrayList<Integer>(); ringItemData = new ArrayList<>();
ringData.put(ringSecond, ringItemData); ringData.put(ringSecond, ringItemData);
} }
ringItemData.add(jobId); ringItemData.add(jobId);
@ -316,7 +318,7 @@ public class JobScheduleHelper {
boolean hasRingData = false; boolean hasRingData = false;
if (!ringData.isEmpty()) { if (!ringData.isEmpty()) {
for (int second : ringData.keySet()) { for (int second : ringData.keySet()) {
List<Integer> tmpData = ringData.get(second); List<Long> tmpData = ringData.get(second);
if (tmpData!=null && tmpData.size()>0) { if (tmpData!=null && tmpData.size()>0) {
hasRingData = true; hasRingData = true;
break; break;

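The schedule-helper changes only widen the ring values from Integer to Long job ids; the time-ring logic itself is unchanged: jobs are bucketed by the second at which they should fire, and each tick drains the current bucket plus the previous one so a slow pass cannot permanently skip a slot. A self-contained sketch of that drain step under the same Map<Integer, List<Long>> layout:

import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class TimeRingSketch {

    private static final Map<Integer, List<Long>> ringData = new ConcurrentHashMap<>();

    // (nowSecond + 60 - i) % 60 yields the current second for i=0 and the previous
    // second for i=1; e.g. nowSecond=0 drains buckets 0 and 59.
    static List<Long> drainDueJobIds() {
        List<Long> due = new ArrayList<>();
        int nowSecond = Calendar.getInstance().get(Calendar.SECOND);
        for (int i = 0; i < 2; i++) {
            List<Long> bucket = ringData.remove((nowSecond + 60 - i) % 60);
            if (bucket != null) {
                due.addAll(bucket);
            }
        }
        return due;
    }
}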
@ -6,6 +6,7 @@ import com.xxl.job.admin.core.trigger.XxlJobTrigger;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import javax.annotation.Resource;
import java.util.concurrent.*; import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
@ -16,6 +17,8 @@ import java.util.concurrent.atomic.AtomicInteger;
*/ */
public class JobTriggerPoolHelper { public class JobTriggerPoolHelper {
private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class); private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class);
@Resource
private XxlJobTrigger xxlJobTrigger;
// ---------------------- trigger pool ---------------------- // ---------------------- trigger pool ----------------------
@ -63,13 +66,13 @@ public class JobTriggerPoolHelper {
// job timeout count // job timeout count
private volatile long minTim = System.currentTimeMillis()/60000; // ms > min private volatile long minTim = System.currentTimeMillis()/60000; // ms > min
private volatile ConcurrentMap<Integer, AtomicInteger> jobTimeoutCountMap = new ConcurrentHashMap<>(); private volatile ConcurrentMap<Long, AtomicInteger> jobTimeoutCountMap = new ConcurrentHashMap<>();
/** /**
* add trigger * add trigger
*/ */
public void addTrigger(final int jobId, public void addTrigger(final long jobId,
final TriggerTypeEnum triggerType, final TriggerTypeEnum triggerType,
final int failRetryCount, final int failRetryCount,
final String executorShardingParam, final String executorShardingParam,
@ -143,7 +146,7 @@ public class JobTriggerPoolHelper {
* null: use job param * null: use job param
* not null: cover job param * not null: cover job param
*/ */
public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam, String addressList) { public static void trigger(long jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam, String addressList) {
helper.addTrigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam, addressList); helper.addTrigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam, addressList);
} }

@ -15,6 +15,7 @@ import com.xxl.job.core.util.IpUtil;
import com.xxl.job.core.util.ThrowableUtil; import com.xxl.job.core.util.ThrowableUtil;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.util.Date; import java.util.Date;
@ -22,9 +23,13 @@ import java.util.Date;
* xxl-job trigger * xxl-job trigger
* Created by xuxueli on 17/7/13. * Created by xuxueli on 17/7/13.
*/ */
@Component
public class XxlJobTrigger { public class XxlJobTrigger {
private static Logger logger = LoggerFactory.getLogger(XxlJobTrigger.class); private static Logger logger = LoggerFactory.getLogger(XxlJobTrigger.class);
// @Autowired
// private static GenerateId generateId;
/** /**
* trigger job * trigger job
* *
@ -41,7 +46,7 @@ public class XxlJobTrigger {
* null: use executor addressList * null: use executor addressList
* not null: cover * not null: cover
*/ */
public static void trigger(int jobId, public static void trigger(long jobId,
TriggerTypeEnum triggerType, TriggerTypeEnum triggerType,
int failRetryCount, int failRetryCount,
String executorShardingParam, String executorShardingParam,
@ -120,6 +125,10 @@ public class XxlJobTrigger {
jobLog.setJobGroup(jobInfo.getJobGroup()); jobLog.setJobGroup(jobInfo.getJobGroup());
jobLog.setJobId(jobInfo.getId()); jobLog.setJobId(jobInfo.getId());
jobLog.setTriggerTime(new Date()); jobLog.setTriggerTime(new Date());
jobLog.setId(XxlJobAdminConfig.getAdminConfig().getGenerateId().getId());
jobLog.setHandleTime(new Date());
jobLog.setExecutorFailRetryCount(0);
jobLog.setAlarmStatus(0);
XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().save(jobLog); XxlJobAdminConfig.getAdminConfig().getXxlJobLogDao().save(jobLog);
logger.debug(">>>>>>>>>>> xxl-job trigger start, jobId:{}", jobLog.getId()); logger.debug(">>>>>>>>>>> xxl-job trigger start, jobId:{}", jobLog.getId());

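GenerateId is only referenced here (and injected in the service layer below); its implementation is not part of this diff. Given the new xxl_job_machine table and MachineUtils, it is presumably a snowflake-style generator that combines a timestamp, a per-node machine id and a sequence. The sketch below illustrates that assumption only; the bit layout and names are not the PR's actual code:

// Hypothetical snowflake-style generator: 41-bit millisecond timestamp,
// 10-bit machine id, 12-bit per-millisecond sequence.
public class SnowflakeIdSketch {

    private final long machineId;   // e.g. resolved from xxl_job_machine via MachineUtils.getIP()
    private long lastMillis = -1L;
    private long sequence = 0L;

    public SnowflakeIdSketch(long machineId) {
        this.machineId = machineId & 0x3FF;   // keep 10 bits
    }

    public synchronized long getId() {
        long now = System.currentTimeMillis();
        if (now == lastMillis) {
            sequence = (sequence + 1) & 0xFFF;   // 12-bit sequence within one millisecond
            if (sequence == 0) {                 // sequence exhausted: wait for the next millisecond
                while ((now = System.currentTimeMillis()) <= lastMillis) { /* spin */ }
            }
        } else {
            sequence = 0;
        }
        lastMillis = now;
        return (now << 22) | (machineId << 12) | sequence;
    }
}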
@ -0,0 +1,60 @@
package com.xxl.job.admin.core.util;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.util.Enumeration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author
*
*/
public class MachineUtils {
private static final Logger logger = LoggerFactory.getLogger(MachineUtils.class);
private static String machineIp = null;
public static String getIPAndPort(String port) {
return getIP().concat(":").concat(port);
}
public static String getIP() {
if (machineIp == null) {
String ipv4 = getInet4Address();
logger.info("ipv4={}", ipv4);
machineIp = ipv4;
}
return machineIp;
}
/**
* Ipv4
*/
public static String getInet4Address() {
Enumeration<NetworkInterface> nis;
String ip = null;
try {
nis = NetworkInterface.getNetworkInterfaces();
for (; nis.hasMoreElements();) {
NetworkInterface ni = nis.nextElement();
Enumeration<InetAddress> ias = ni.getInetAddresses();
for (; ias.hasMoreElements();) {
InetAddress ia = ias.nextElement();
// ia instanceof Inet6Address && !ia.equals("")
if (ia instanceof Inet4Address && !ia.getHostAddress().equals("127.0.0.1")) {
ip = ia.getHostAddress();
}
}
}
} catch (Exception e) {
logger.error("getServerIpAddress执行出错" + e.getMessage() + "," + e.getCause());
}
return ip;
}
}

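MachineUtils walks every network interface and keeps the last non-loopback IPv4 address it finds (so on multi-homed hosts the result depends on interface order), caching it after the first lookup. A small usage sketch; the port value is purely illustrative:

import com.xxl.job.admin.core.util.MachineUtils;

public class MachineUtilsUsage {
    public static void main(String[] args) {
        String ip = MachineUtils.getIP();                    // cached after the first call
        String address = MachineUtils.getIPAndPort("8080");  // e.g. "192.168.1.10:8080"
        System.out.println(ip + " / " + address);
    }
}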
@ -20,18 +20,14 @@ public interface XxlJobGroupDao {
public int update(XxlJobGroup xxlJobGroup); public int update(XxlJobGroup xxlJobGroup);
public int remove(@Param("id") int id); public int remove(@Param("id") long Id);
public XxlJobGroup load(@Param("id") int id); public XxlJobGroup load(@Param("id") long Id);
public List<XxlJobGroup> pageList(@Param("offset") int offset, public List<XxlJobGroup> pageList(@Param("appname") String appname,
@Param("pagesize") int pagesize,
@Param("appname") String appname,
@Param("title") String title); @Param("title") String title);
public int pageListCount(@Param("offset") int offset, public int pageListCount(@Param("appname") String appname,
@Param("pagesize") int pagesize,
@Param("appname") String appname,
@Param("title") String title); @Param("title") String title);
} }

@ -14,16 +14,12 @@ import java.util.List;
@Mapper @Mapper
public interface XxlJobInfoDao { public interface XxlJobInfoDao {
public List<XxlJobInfo> pageList(@Param("offset") int offset, public List<XxlJobInfo> pageList(@Param("jobGroup") long jobGroup,
@Param("pagesize") int pagesize,
@Param("jobGroup") int jobGroup,
@Param("triggerStatus") int triggerStatus, @Param("triggerStatus") int triggerStatus,
@Param("jobDesc") String jobDesc, @Param("jobDesc") String jobDesc,
@Param("executorHandler") String executorHandler, @Param("executorHandler") String executorHandler,
@Param("author") String author); @Param("author") String author);
public int pageListCount(@Param("offset") int offset, public int pageListCount(@Param("jobGroup") long jobGroup,
@Param("pagesize") int pagesize,
@Param("jobGroup") int jobGroup,
@Param("triggerStatus") int triggerStatus, @Param("triggerStatus") int triggerStatus,
@Param("jobDesc") String jobDesc, @Param("jobDesc") String jobDesc,
@Param("executorHandler") String executorHandler, @Param("executorHandler") String executorHandler,
@ -31,17 +27,17 @@ public interface XxlJobInfoDao {
public int save(XxlJobInfo info); public int save(XxlJobInfo info);
public XxlJobInfo loadById(@Param("id") int id); public XxlJobInfo loadById(@Param("id") long id);
public int update(XxlJobInfo xxlJobInfo); public int update(XxlJobInfo xxlJobInfo);
public int delete(@Param("id") long id); public int delete(@Param("id") long id);
public List<XxlJobInfo> getJobsByGroup(@Param("jobGroup") int jobGroup); public List<XxlJobInfo> getJobsByGroup(@Param("jobGroup") long jobGroup);
public int findAllCount(); public int findAllCount();
public List<XxlJobInfo> scheduleJobQuery(@Param("maxNextTime") long maxNextTime, @Param("pagesize") int pagesize ); public List<XxlJobInfo> scheduleJobQuery(@Param("maxNextTime") long maxNextTime );
public int scheduleUpdate(XxlJobInfo xxlJobInfo); public int scheduleUpdate(XxlJobInfo xxlJobInfo);

@ -16,17 +16,13 @@ import java.util.Map;
public interface XxlJobLogDao { public interface XxlJobLogDao {
// exist jobId not use jobGroup, not exist use jobGroup // exist jobId not use jobGroup, not exist use jobGroup
public List<XxlJobLog> pageList(@Param("offset") int offset, public List<XxlJobLog> pageList(@Param("jobGroup") long jobGroup,
@Param("pagesize") int pagesize, @Param("jobId") long jobId,
@Param("jobGroup") int jobGroup,
@Param("jobId") int jobId,
@Param("triggerTimeStart") Date triggerTimeStart, @Param("triggerTimeStart") Date triggerTimeStart,
@Param("triggerTimeEnd") Date triggerTimeEnd, @Param("triggerTimeEnd") Date triggerTimeEnd,
@Param("logStatus") int logStatus); @Param("logStatus") int logStatus);
public int pageListCount(@Param("offset") int offset, public int pageListCount(@Param("jobGroup") long jobGroup,
@Param("pagesize") int pagesize, @Param("jobId") long jobId,
@Param("jobGroup") int jobGroup,
@Param("jobId") int jobId,
@Param("triggerTimeStart") Date triggerTimeStart, @Param("triggerTimeStart") Date triggerTimeStart,
@Param("triggerTimeEnd") Date triggerTimeEnd, @Param("triggerTimeEnd") Date triggerTimeEnd,
@Param("logStatus") int logStatus); @Param("logStatus") int logStatus);
@ -39,19 +35,19 @@ public interface XxlJobLogDao {
public int updateHandleInfo(XxlJobLog xxlJobLog); public int updateHandleInfo(XxlJobLog xxlJobLog);
public int delete(@Param("jobId") int jobId); public int delete(@Param("jobId") long jobId);
public Map<String, Object> findLogReport(@Param("from") Date from, public Map<String, Object> findLogReport(@Param("from") Date from,
@Param("to") Date to); @Param("to") Date to);
public List<Long> findClearLogIds(@Param("jobGroup") int jobGroup, public List<Long> findClearLogIds(@Param("jobGroup") long jobGroup,
@Param("jobId") int jobId, @Param("jobId") long jobId,
@Param("clearBeforeTime") Date clearBeforeTime, @Param("clearBeforeTime") Date clearBeforeTime,
@Param("clearBeforeNum") int clearBeforeNum, @Param("recentLogIds") List<Long> recentLogIds
@Param("pagesize") int pagesize); );
public int clearLog(@Param("logIds") List<Long> logIds); public int clearLog(@Param("logIds") List<Long> logIds);
public List<Long> findFailJobLogIds(@Param("pagesize") int pagesize); public List<Long> findFailJobLogIds();
public int updateAlarmStatus(@Param("logId") long logId, public int updateAlarmStatus(@Param("logId") long logId,
@Param("oldAlarmStatus") int oldAlarmStatus, @Param("oldAlarmStatus") int oldAlarmStatus,
@ -59,4 +55,6 @@ public interface XxlJobLogDao {
public List<Long> findLostJobIds(@Param("losedTime") Date losedTime); public List<Long> findLostJobIds(@Param("losedTime") Date losedTime);
public List<Long> findRecentLogs(@Param("jobGroup") long jobGroup, @Param("jobId") long jobId);
} }

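findClearLogIds loses its clearBeforeNum/pagesize parameters: the "keep the newest N logs" rule is no longer expressed as a nested LIMIT subquery (MySQL-only) but is expected to arrive as recentLogIds, presumably gathered with the new findRecentLogs query plus PageHelper. A sketch of that assumed caller pattern; the actual call site is outside this hunk, so the flow below is an assumption:

import com.github.pagehelper.PageHelper;
import java.util.Date;
import java.util.List;

public class LogCleanupSketch {

    // Hypothetical stand-in for the relevant XxlJobLogDao methods declared above.
    interface LogDao {
        List<Long> findRecentLogs(long jobGroup, long jobId);
        List<Long> findClearLogIds(long jobGroup, long jobId, Date clearBeforeTime, List<Long> recentLogIds);
        int clearLog(List<Long> logIds);
    }

    // Keep the newest `keep` logs of a job, then collect and delete everything else
    // that is older than `before`.
    static void clean(LogDao dao, long jobGroup, long jobId, Date before, int keep) {
        PageHelper.startPage(1, keep);                          // bounds the next query to `keep` rows
        List<Long> recent = dao.findRecentLogs(jobGroup, jobId);
        List<Long> toClear = dao.findClearLogIds(jobGroup, jobId, before, recent);
        if (toClear != null && !toClear.isEmpty()) {
            dao.clearLog(toClear);
        }
    }
}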
@ -15,10 +15,12 @@ public interface XxlJobLogGlueDao {
public int save(XxlJobLogGlue xxlJobLogGlue); public int save(XxlJobLogGlue xxlJobLogGlue);
public List<XxlJobLogGlue> findByJobId(@Param("jobId") int jobId); public List<XxlJobLogGlue> findByJobId(@Param("jobId") long jobId);
public int removeOld(@Param("jobId") int jobId, @Param("limit") int limit); public int removeOld(@Param("jobId") long jobId,@Param("recentLogIds") List<Long> recentLogIds);
public int deleteByJobId(@Param("jobId") int jobId); public int deleteByJobId(@Param("jobId") long jobId);
public List<Long> findIds(@Param("jobId") long jobId);
} }

@ -14,7 +14,7 @@ import java.util.List;
@Mapper @Mapper
public interface XxlJobLogReportDao { public interface XxlJobLogReportDao {
public int save(XxlJobLogReport xxlJobLogReport); void save(XxlJobLogReport xxlJobLogReport);
public int update(XxlJobLogReport xxlJobLogReport); public int update(XxlJobLogReport xxlJobLogReport);

@ -0,0 +1,20 @@
package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.model.XxlJobMachine;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
@Mapper
public interface XxlJobMachineDao {
void save(@Param("xxlJobMachine")XxlJobMachine xxlJobMachine);
void update(@Param("machineIp") String machineIp, @Param("heartLastTime") Date heartLastTime);
XxlJobMachine selectByHostIp(String machineIp);
Integer selectMaxMachineId();
}

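The new machine table gives each admin node a small integer id, presumably the machine component of the generated primary keys used throughout this PR. A sketch of how the DAO is likely driven at startup; the wiring is not shown in this diff, so every step here is an assumption:

import java.util.Date;

public class MachineRegistrationSketch {

    // Hypothetical stand-ins mirroring XxlJobMachineDao and XxlJobMachine.
    interface MachineDao {
        Machine selectByHostIp(String machineIp);
        Integer selectMaxMachineId();
        void save(Machine m);
        void update(String machineIp, Date heartLastTime);
    }
    static class Machine {
        String machineIp; Integer machineId; Date addTime; Date heartLastTime;
    }

    // Reuse the row for this node's IP if it exists, otherwise allocate max(machine_id)+1.
    static int resolveMachineId(MachineDao dao, String localIp) {
        Machine existing = dao.selectByHostIp(localIp);
        if (existing != null) {
            dao.update(localIp, new Date());   // refresh the heartbeat
            return existing.machineId;
        }
        Machine m = new Machine();
        m.machineIp = localIp;
        Integer max = dao.selectMaxMachineId();
        m.machineId = (max == null ? 0 : max) + 1;
        m.addTime = new Date();
        m.heartLastTime = new Date();
        dao.save(m);
        return m.machineId;
    }
}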
@ -13,13 +13,11 @@ import java.util.List;
@Mapper @Mapper
public interface XxlJobRegistryDao { public interface XxlJobRegistryDao {
public List<Integer> findDead(@Param("timeout") int timeout, public List<Long> findDead(@Param("date") Date date);
@Param("nowTime") Date nowTime);
public int removeDead(@Param("ids") List<Integer> ids); public int removeDead(@Param("ids") List<Long> ids);
public List<XxlJobRegistry> findAll(@Param("timeout") int timeout, public List<XxlJobRegistry> findAll(@Param("date") Date date);
@Param("nowTime") Date nowTime);
public int registryUpdate(@Param("registryGroup") String registryGroup, public int registryUpdate(@Param("registryGroup") String registryGroup,
@Param("registryKey") String registryKey, @Param("registryKey") String registryKey,
@ -29,7 +27,8 @@ public interface XxlJobRegistryDao {
public int registrySave(@Param("registryGroup") String registryGroup, public int registrySave(@Param("registryGroup") String registryGroup,
@Param("registryKey") String registryKey, @Param("registryKey") String registryKey,
@Param("registryValue") String registryValue, @Param("registryValue") String registryValue,
@Param("updateTime") Date updateTime); @Param("updateTime") Date updateTime,
@Param("id") long id);
public int registryDelete(@Param("registryGroup") String registryGroup, public int registryDelete(@Param("registryGroup") String registryGroup,
@Param("registryKey") String registryKey, @Param("registryKey") String registryKey,

@ -11,21 +11,16 @@ import java.util.List;
@Mapper @Mapper
public interface XxlJobUserDao { public interface XxlJobUserDao {
public List<XxlJobUser> pageList(@Param("offset") int offset, public List<XxlJobUser> pageList(@Param("username") String username, @Param("role") int role);
@Param("pagesize") int pagesize,
@Param("username") String username, public int pageListCount(@Param("username") String username, @Param("role") int role);
@Param("role") int role);
public int pageListCount(@Param("offset") int offset,
@Param("pagesize") int pagesize,
@Param("username") String username,
@Param("role") int role);
public XxlJobUser loadByUserName(@Param("username") String username); public XxlJobUser loadByUserName(@Param("username") String username);
public int save(XxlJobUser xxlJobUser); public void save(XxlJobUser xxlJobUser);
public int update(XxlJobUser xxlJobUser); public int update(XxlJobUser xxlJobUser);
public int delete(@Param("id") int id); public int delete(@Param("id") long id);
} }

@ -25,7 +25,7 @@ public interface XxlJobService {
* @param author * @param author
* @return * @return
*/ */
public Map<String, Object> pageList(int start, int length, int jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author); public Map<String, Object> pageList(int start, int length, long jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author);
/** /**
* add job * add job
@ -49,7 +49,7 @@ public interface XxlJobService {
* @param id * @param id
* @return * @return
*/ */
public ReturnT<String> remove(int id); public ReturnT<String> remove(long id);
/** /**
* start job * start job
@ -57,7 +57,7 @@ public interface XxlJobService {
* @param id * @param id
* @return * @return
*/ */
public ReturnT<String> start(int id); public ReturnT<String> start(long id);
/** /**
* stop job * stop job
@ -65,7 +65,7 @@ public interface XxlJobService {
* @param id * @param id
* @return * @return
*/ */
public ReturnT<String> stop(int id); public ReturnT<String> stop(long id);
/** /**
* dashboard info * dashboard info

@ -1,5 +1,6 @@
package com.xxl.job.admin.service.impl; package com.xxl.job.admin.service.impl;
import com.xxl.job.admin.core.id.GenerateId;
import com.xxl.job.admin.core.model.XxlJobInfo; import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog; import com.xxl.job.admin.core.model.XxlJobLog;
import com.xxl.job.admin.core.thread.JobTriggerPoolHelper; import com.xxl.job.admin.core.thread.JobTriggerPoolHelper;
@ -39,6 +40,8 @@ public class AdminBizImpl implements AdminBiz {
private XxlJobRegistryDao xxlJobRegistryDao; private XxlJobRegistryDao xxlJobRegistryDao;
@Resource @Resource
private XxlJobGroupDao xxlJobGroupDao; private XxlJobGroupDao xxlJobGroupDao;
@Resource
private GenerateId generateId;
@Override @Override
@ -141,7 +144,7 @@ public class AdminBizImpl implements AdminBiz {
int ret = xxlJobRegistryDao.registryUpdate(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), registryParam.getRegistryValue(), new Date()); int ret = xxlJobRegistryDao.registryUpdate(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), registryParam.getRegistryValue(), new Date());
if (ret < 1) { if (ret < 1) {
xxlJobRegistryDao.registrySave(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), registryParam.getRegistryValue(), new Date()); xxlJobRegistryDao.registrySave(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), registryParam.getRegistryValue(), new Date(), generateId.getId());
// fresh // fresh
freshGroupRegistryInfo(registryParam); freshGroupRegistryInfo(registryParam);

@ -1,5 +1,7 @@
package com.xxl.job.admin.service.impl; package com.xxl.job.admin.service.impl;
import com.github.pagehelper.PageHelper;
import com.xxl.job.admin.core.id.GenerateId;
import com.xxl.job.admin.core.model.XxlJobGroup; import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobInfo; import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.cron.CronExpression; import com.xxl.job.admin.core.cron.CronExpression;
@ -16,7 +18,6 @@ import com.xxl.job.core.util.DateUtil;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import javax.annotation.Resource; import javax.annotation.Resource;
import java.text.MessageFormat; import java.text.MessageFormat;
import java.text.ParseException; import java.text.ParseException;
@ -40,13 +41,16 @@ public class XxlJobServiceImpl implements XxlJobService {
private XxlJobLogGlueDao xxlJobLogGlueDao; private XxlJobLogGlueDao xxlJobLogGlueDao;
@Resource @Resource
private XxlJobLogReportDao xxlJobLogReportDao; private XxlJobLogReportDao xxlJobLogReportDao;
@Resource
private GenerateId generateId;
@Override @Override
public Map<String, Object> pageList(int start, int length, int jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author) { public Map<String, Object> pageList(int start, int length, long jobGroup, int triggerStatus, String jobDesc, String executorHandler, String author) {
// page list // page list
List<XxlJobInfo> list = xxlJobInfoDao.pageList(start, length, jobGroup, triggerStatus, jobDesc, executorHandler, author); PageHelper.startPage(start/length+1,length);
int list_count = xxlJobInfoDao.pageListCount(start, length, jobGroup, triggerStatus, jobDesc, executorHandler, author); List<XxlJobInfo> list = xxlJobInfoDao.pageList(jobGroup, triggerStatus, jobDesc, executorHandler, author);
int list_count = xxlJobInfoDao.pageListCount(jobGroup, triggerStatus, jobDesc, executorHandler, author);
// package result // package result
Map<String, Object> maps = new HashMap<String, Object>(); Map<String, Object> maps = new HashMap<String, Object>();
@ -120,6 +124,7 @@ public class XxlJobServiceImpl implements XxlJobService {
jobInfo.setAddTime(new Date()); jobInfo.setAddTime(new Date());
jobInfo.setUpdateTime(new Date()); jobInfo.setUpdateTime(new Date());
jobInfo.setGlueUpdatetime(new Date()); jobInfo.setGlueUpdatetime(new Date());
jobInfo.setId(generateId.getId());
xxlJobInfoDao.save(jobInfo); xxlJobInfoDao.save(jobInfo);
if (jobInfo.getId() < 1) { if (jobInfo.getId() < 1) {
return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_add")+I18nUtil.getString("system_fail")) ); return new ReturnT<String>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_add")+I18nUtil.getString("system_fail")) );
@ -128,6 +133,10 @@ public class XxlJobServiceImpl implements XxlJobService {
return new ReturnT<String>(String.valueOf(jobInfo.getId())); return new ReturnT<String>(String.valueOf(jobInfo.getId()));
} }
public static void main(String[] args) {
// Integer a = 185337369535193088;
}
private boolean isNumeric(String str){ private boolean isNumeric(String str){
try { try {
int result = Integer.valueOf(str); int result = Integer.valueOf(str);
@ -232,7 +241,7 @@ public class XxlJobServiceImpl implements XxlJobService {
} }
@Override @Override
public ReturnT<String> remove(int id) { public ReturnT<String> remove(long id) {
XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id); XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id);
if (xxlJobInfo == null) { if (xxlJobInfo == null) {
return ReturnT.SUCCESS; return ReturnT.SUCCESS;
@ -245,7 +254,7 @@ public class XxlJobServiceImpl implements XxlJobService {
} }
@Override @Override
public ReturnT<String> start(int id) { public ReturnT<String> start(long id) {
XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id); XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id);
// next trigger time (takes effect after 5s to avoid the pre-read window) // next trigger time (takes effect after 5s to avoid the pre-read window)
@ -271,7 +280,7 @@ public class XxlJobServiceImpl implements XxlJobService {
} }
@Override @Override
public ReturnT<String> stop(int id) { public ReturnT<String> stop(long id) {
XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id); XxlJobInfo xxlJobInfo = xxlJobInfoDao.loadById(id);
xxlJobInfo.setTriggerStatus(0); xxlJobInfo.setTriggerStatus(0);

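pageList now converts the DataTables-style (start, length) pair into a 1-based page number for PageHelper with start/length + 1: start=0,length=10 maps to page 1, start=10 to page 2, start=20 to page 3. The mapping only lands on exact page boundaries when start is a multiple of length, which holds for the admin UI's requests. A tiny sketch of the conversion:

public class PageConversionSketch {

    // DataTables sends an absolute row offset; PageHelper expects a 1-based page number.
    static int toPageNumber(int start, int length) {
        return start / length + 1;   // (0,10) -> 1, (10,10) -> 2, (20,10) -> 3
    }

    public static void main(String[] args) {
        System.out.println(toPageNumber(0, 10));   // 1
        System.out.println(toPageNumber(20, 10));  // 3
    }
}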
@ -20,13 +20,28 @@ spring.freemarker.settings.number_format=0.##########
### mybatis ### mybatis
mybatis.mapper-locations=classpath:/mybatis-mapper/*Mapper.xml mybatis.mapper-locations=classpath:/mybatis-mapper/*Mapper.xml
mybatis.configuration.jdbc-type-for-null=NULL
#mybatis.type-aliases-package=com.xxl.job.admin.core.model #mybatis.type-aliases-package=com.xxl.job.admin.core.model
logging.level.com.xxl.job.admin.dao.XxlJobLogDao=debug
logging.level.com.xxl.job.admin.dao.XxlJobRegistryDao=debug
logging.level.com.xxl.job.admin.dao.XxlJobLogReportDao=debug
### xxl-job, datasource ### pagehelper
spring.datasource.url=jdbc:mysql://127.0.0.1:3306/xxl_job?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&serverTimezone=Asia/Shanghai pagehelper.auto-runtime-dialect=true
spring.datasource.username=root
spring.datasource.password=root_pwd ### xxl-job, mysql datasource
spring.datasource.url=jdbc:xxxxxxxx
spring.datasource.username=xxxxxxxxxx
spring.datasource.password=xxxxxxxx
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.hikari.connection-test-query=SELECT 1
### xxl-job, oracle datasource
#spring.datasource.url = xxxxxxx
#spring.datasource.username=xxxxxxx
#spring.datasource.password=xxxxxxx
#spring.datasource.driver-class-name=oracle.jdbc.driver.OracleDriver
#spring.datasource.hikari.connection-test-query=SELECT 1 FROM DUAL
### datasource-pool ### datasource-pool
spring.datasource.type=com.zaxxer.hikari.HikariDataSource spring.datasource.type=com.zaxxer.hikari.HikariDataSource
@ -37,7 +52,6 @@ spring.datasource.hikari.idle-timeout=30000
spring.datasource.hikari.pool-name=HikariCP spring.datasource.hikari.pool-name=HikariCP
spring.datasource.hikari.max-lifetime=900000 spring.datasource.hikari.max-lifetime=900000
spring.datasource.hikari.connection-timeout=10000 spring.datasource.hikari.connection-timeout=10000
spring.datasource.hikari.connection-test-query=SELECT 1
### xxl-job, email ### xxl-job, email
spring.mail.host=smtp.qq.com spring.mail.host=smtp.qq.com
@ -61,3 +75,5 @@ xxl.job.triggerpool.slow.max=100
### xxl-job, log retention days ### xxl-job, log retention days
xxl.job.logretentiondays=30 xxl.job.logretentiondays=30

@ -12,75 +12,74 @@
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
t.id, id,
t.app_name, app_name,
t.title, title,
t.address_type, address_type,
t.address_list address_list
</sql> </sql>
<select id="findAll" resultMap="XxlJobGroup"> <select id="findAll" resultMap="XxlJobGroup">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_group AS t FROM xxl_job_group
ORDER BY t.app_name, t.title, t.id ASC ORDER BY app_name, title, id ASC
</select> </select>
<select id="findByAddressType" parameterType="java.lang.Integer" resultMap="XxlJobGroup"> <select id="findByAddressType" parameterType="java.lang.Integer" resultMap="XxlJobGroup">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_group AS t FROM xxl_job_group
WHERE t.address_type = #{addressType} WHERE address_type = #{addressType}
ORDER BY t.app_name, t.title, t.id ASC ORDER BY app_name, title, id ASC
</select> </select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobGroup" useGeneratedKeys="true" keyProperty="id" > <insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobGroup" >
INSERT INTO xxl_job_group ( `app_name`, `title`, `address_type`, `address_list`) INSERT INTO xxl_job_group (id, app_name, title, address_type, address_list)
values ( #{appname}, #{title}, #{addressType}, #{addressList}); values (#{id}, #{appname}, #{title}, #{addressType}, #{addressList})
</insert> </insert>
<update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobGroup" > <update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobGroup" >
UPDATE xxl_job_group UPDATE xxl_job_group
SET `app_name` = #{appname}, SET app_name = #{appname},
`title` = #{title}, title = #{title},
`address_type` = #{addressType}, address_type = #{addressType},
`address_list` = #{addressList} address_list = #{addressList}
WHERE id = #{id} WHERE id = #{id}
</update> </update>
<delete id="remove" parameterType="java.lang.Integer" > <delete id="remove" parameterType="java.lang.Long" >
DELETE FROM xxl_job_group DELETE FROM xxl_job_group
WHERE id = #{id} WHERE id = #{id}
</delete> </delete>
<select id="load" parameterType="java.lang.Integer" resultMap="XxlJobGroup"> <select id="load" parameterType="java.lang.Long" resultMap="XxlJobGroup">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_group AS t FROM xxl_job_group
WHERE t.id = #{id} WHERE id = #{id}
</select> </select>
<select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobGroup"> <select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobGroup">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_group AS t FROM xxl_job_group
<trim prefix="WHERE" prefixOverrides="AND | OR" > <trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="appname != null and appname != ''"> <if test="appname != null and appname != ''">
AND t.app_name like CONCAT(CONCAT('%', #{appname}), '%') AND app_name like CONCAT(CONCAT('%', #{appname}), '%')
</if> </if>
<if test="title != null and title != ''"> <if test="title != null and title != ''">
AND t.title like CONCAT(CONCAT('%', #{title}), '%') AND title like CONCAT(CONCAT('%', #{title}), '%')
</if> </if>
</trim> </trim>
ORDER BY t.app_name, t.title, t.id ASC ORDER BY app_name, title, id ASC
LIMIT #{offset}, #{pagesize}
</select> </select>
<select id="pageListCount" parameterType="java.util.HashMap" resultType="int"> <select id="pageListCount" parameterType="java.util.HashMap" resultType="int">
SELECT count(1) SELECT count(1)
FROM xxl_job_group AS t FROM xxl_job_group
<trim prefix="WHERE" prefixOverrides="AND | OR" > <trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="appname != null and appname != ''"> <if test="appname != null and appname != ''">
AND t.app_name like CONCAT(CONCAT('%', #{appname}), '%') AND app_name like CONCAT(CONCAT('%', #{appname}), '%')
</if> </if>
<if test="title != null and title != ''"> <if test="title != null and title != ''">
AND t.title like CONCAT(CONCAT('%', #{title}), '%') AND title like CONCAT(CONCAT('%', #{title}), '%')
</if> </if>
</trim> </trim>
</select> </select>

@ -36,78 +36,78 @@
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
t.id, id,
t.job_group, job_group,
t.job_cron, job_cron,
t.job_desc, job_desc,
t.add_time, add_time,
t.update_time, update_time,
t.author, author,
t.alarm_email, alarm_email,
t.executor_route_strategy, executor_route_strategy,
t.executor_handler, executor_handler,
t.executor_param, executor_param,
t.executor_block_strategy, executor_block_strategy,
t.executor_timeout, executor_timeout,
t.executor_fail_retry_count, executor_fail_retry_count,
t.glue_type, glue_type,
t.glue_source, glue_source,
t.glue_remark, glue_remark,
t.glue_updatetime, glue_updatetime,
t.child_jobid, child_jobid,
t.trigger_status, trigger_status,
t.trigger_last_time, trigger_last_time,
t.trigger_next_time trigger_next_time
</sql> </sql>
<select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobInfo"> <select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobInfo">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_info AS t FROM xxl_job_info
<trim prefix="WHERE" prefixOverrides="AND | OR" > <trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobGroup gt 0"> <if test="jobGroup gt 0">
AND t.job_group = #{jobGroup} AND job_group = #{jobGroup}
</if> </if>
<if test="triggerStatus gte 0"> <if test="triggerStatus gte 0">
AND t.trigger_status = #{triggerStatus} AND trigger_status = #{triggerStatus}
</if> </if>
<if test="jobDesc != null and jobDesc != ''"> <if test="jobDesc != null and jobDesc != ''">
AND t.job_desc like CONCAT(CONCAT('%', #{jobDesc}), '%') AND job_desc like CONCAT(CONCAT('%', #{jobDesc}), '%')
</if> </if>
<if test="executorHandler != null and executorHandler != ''"> <if test="executorHandler != null and executorHandler != ''">
AND t.executor_handler like CONCAT(CONCAT('%', #{executorHandler}), '%') AND executor_handler like CONCAT(CONCAT('%', #{executorHandler}), '%')
</if> </if>
<if test="author != null and author != ''"> <if test="author != null and author != ''">
AND t.author like CONCAT(CONCAT('%', #{author}), '%') AND author like CONCAT(CONCAT('%', #{author}), '%')
</if> </if>
</trim> </trim>
ORDER BY id DESC ORDER BY id DESC
LIMIT #{offset}, #{pagesize}
</select> </select>
<select id="pageListCount" parameterType="java.util.HashMap" resultType="int"> <select id="pageListCount" parameterType="java.util.HashMap" resultType="int">
SELECT count(1) SELECT count(1)
FROM xxl_job_info AS t FROM xxl_job_info
<trim prefix="WHERE" prefixOverrides="AND | OR" > <trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobGroup gt 0"> <if test="jobGroup gt 0">
AND t.job_group = #{jobGroup} AND job_group = #{jobGroup}
</if> </if>
<if test="triggerStatus gte 0"> <if test="triggerStatus gte 0">
AND t.trigger_status = #{triggerStatus} AND trigger_status = #{triggerStatus}
</if> </if>
<if test="jobDesc != null and jobDesc != ''"> <if test="jobDesc != null and jobDesc != ''">
AND t.job_desc like CONCAT(CONCAT('%', #{jobDesc}), '%') AND job_desc like CONCAT(CONCAT('%', #{jobDesc}), '%')
</if> </if>
<if test="executorHandler != null and executorHandler != ''"> <if test="executorHandler != null and executorHandler != ''">
AND t.executor_handler like CONCAT(CONCAT('%', #{executorHandler}), '%') AND executor_handler like CONCAT(CONCAT('%', #{executorHandler}), '%')
</if> </if>
<if test="author != null and author != ''"> <if test="author != null and author != ''">
AND t.author like CONCAT(CONCAT('%', #{author}), '%') AND author like CONCAT(CONCAT('%', #{author}), '%')
</if> </if>
</trim> </trim>
</select> </select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" useGeneratedKeys="true" keyProperty="id" > <insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" >
INSERT INTO xxl_job_info ( INSERT INTO xxl_job_info (
id,
job_group, job_group,
job_cron, job_cron,
job_desc, job_desc,
@ -130,11 +130,12 @@
trigger_last_time, trigger_last_time,
trigger_next_time trigger_next_time
) VALUES ( ) VALUES (
#{id},
#{jobGroup}, #{jobGroup},
#{jobCron}, #{jobCron},
#{jobDesc}, #{jobDesc},
#{addTime}, #{addTime,jdbcType=TIMESTAMP},
#{updateTime}, #{updateTime,jdbcType=TIMESTAMP},
#{author}, #{author},
#{alarmEmail}, #{alarmEmail},
#{executorRouteStrategy}, #{executorRouteStrategy},
@ -146,12 +147,12 @@
#{glueType}, #{glueType},
#{glueSource}, #{glueSource},
#{glueRemark}, #{glueRemark},
#{glueUpdatetime}, #{glueUpdatetime,jdbcType=TIMESTAMP},
#{childJobId}, #{childJobId},
#{triggerStatus}, #{triggerStatus},
#{triggerLastTime}, #{triggerLastTime},
#{triggerNextTime} #{triggerNextTime}
); )
<!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id"> <!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id">
SELECT LAST_INSERT_ID() SELECT LAST_INSERT_ID()
/*SELECT @@IDENTITY AS id*/ /*SELECT @@IDENTITY AS id*/
@ -160,8 +161,8 @@
<select id="loadById" parameterType="java.util.HashMap" resultMap="XxlJobInfo"> <select id="loadById" parameterType="java.util.HashMap" resultMap="XxlJobInfo">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_info AS t FROM xxl_job_info
WHERE t.id = #{id} WHERE id = #{id}
</select> </select>
<update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" > <update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" >
@ -198,8 +199,8 @@
<select id="getJobsByGroup" parameterType="java.util.HashMap" resultMap="XxlJobInfo"> <select id="getJobsByGroup" parameterType="java.util.HashMap" resultMap="XxlJobInfo">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_info AS t FROM xxl_job_info
WHERE t.job_group = #{jobGroup} WHERE job_group = #{jobGroup}
</select> </select>
<select id="findAllCount" resultType="int"> <select id="findAllCount" resultType="int">
@ -210,11 +211,10 @@
<select id="scheduleJobQuery" parameterType="java.util.HashMap" resultMap="XxlJobInfo"> <select id="scheduleJobQuery" parameterType="java.util.HashMap" resultMap="XxlJobInfo">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_info AS t FROM xxl_job_info
WHERE t.trigger_status = 1 WHERE trigger_status = 1
and t.trigger_next_time <![CDATA[ <= ]]> #{maxNextTime} and trigger_next_time <![CDATA[ <= ]]> #{maxNextTime}
ORDER BY id ASC ORDER BY id ASC
LIMIT #{pagesize}
</select> </select>
<update id="scheduleUpdate" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" > <update id="scheduleUpdate" parameterType="com.xxl.job.admin.core.model.XxlJobInfo" >

@ -14,58 +14,66 @@
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
t.id, id,
t.job_id, job_id,
t.glue_type, glue_type,
t.glue_source, glue_source,
t.glue_remark, glue_remark,
t.add_time, add_time,
t.update_time update_time
</sql> </sql>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLogGlue" useGeneratedKeys="true" keyProperty="id" > <insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLogGlue" >
INSERT INTO xxl_job_logglue ( INSERT INTO xxl_job_logglue (
`job_id`, id,
`glue_type`, job_id,
`glue_source`, glue_type,
`glue_remark`, glue_source,
`add_time`, glue_remark,
`update_time` add_time,
update_time
) VALUES ( ) VALUES (
#{id},
#{jobId}, #{jobId},
#{glueType}, #{glueType},
#{glueSource}, #{glueSource},
#{glueRemark}, #{glueRemark},
#{addTime}, #{addTime,jdbcType=TIMESTAMP},
#{updateTime} #{updateTime,jdbcType=TIMESTAMP}
); )
<!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id"> <!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id">
SELECT LAST_INSERT_ID() SELECT LAST_INSERT_ID()
</selectKey>--> </selectKey>-->
</insert> </insert>
<select id="findByJobId" parameterType="java.lang.Integer" resultMap="XxlJobLogGlue"> <select id="findByJobId" parameterType="java.lang.Long" resultMap="XxlJobLogGlue">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_logglue AS t FROM xxl_job_logglue
WHERE t.job_id = #{jobId} WHERE job_id = #{jobId}
ORDER BY id DESC ORDER BY id DESC
</select> </select>
<delete id="removeOld" > <delete id="removeOld" >
DELETE FROM xxl_job_logglue DELETE FROM xxl_job_logglue
WHERE id NOT in( WHERE job_id = #{jobId}
SELECT id FROM( <if test="recentLogIds != null and recentLogIds.size() gt 0">
SELECT id FROM xxl_job_logglue AND id NOT in
WHERE `job_id` = #{jobId} <foreach collection="recentLogIds" item="item" open="(" close=")" separator=",">
ORDER BY update_time desc #{item}
LIMIT 0, #{limit} </foreach>
) t1 </if>
) AND `job_id` = #{jobId}
</delete> </delete>
<delete id="deleteByJobId" parameterType="java.lang.Integer" > <delete id="deleteByJobId" parameterType="java.lang.Long" >
DELETE FROM xxl_job_logglue DELETE FROM xxl_job_logglue
WHERE `job_id` = #{jobId} WHERE job_id = #{jobId}
</delete> </delete>
<select id="findIds" resultType="long">
SELECT id FROM xxl_job_logglue
WHERE job_id = #{jobId}
ORDER BY update_time desc
</select>
</mapper> </mapper>

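removeOld no longer trims glue history with a self-referencing LIMIT subquery (a MySQL-only construct); the caller is now expected to fetch the ids to keep via the new findIds query and pass them in, mirroring the log-cleanup pattern sketched earlier. A short sketch of that assumed call site (not part of this hunk):

import com.github.pagehelper.PageHelper;
import java.util.List;

public class GlueHistoryTrimSketch {

    // Hypothetical stand-in for the relevant XxlJobLogGlueDao methods above.
    interface LogGlueDao {
        List<Long> findIds(long jobId);                      // ordered by update_time desc
        int removeOld(long jobId, List<Long> recentLogIds);  // deletes rows whose id is NOT in the list
    }

    // Keep the `keep` most recent glue versions of a job and drop the rest.
    static int trim(LogGlueDao dao, long jobId, int keep) {
        PageHelper.startPage(1, keep);
        List<Long> recent = dao.findIds(jobId);
        return dao.removeOld(jobId, recent);
    }
}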
@ -27,110 +27,115 @@
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
t.id, id,
t.job_group, job_group,
t.job_id, job_id,
t.executor_address, executor_address,
t.executor_handler, executor_handler,
t.executor_param, executor_param,
t.executor_sharding_param, executor_sharding_param,
t.executor_fail_retry_count, executor_fail_retry_count,
t.trigger_time, trigger_time,
t.trigger_code, trigger_code,
t.trigger_msg, trigger_msg,
t.handle_time, handle_time,
t.handle_code, handle_code,
t.handle_msg, handle_msg,
t.alarm_status alarm_status
</sql> </sql>
<select id="pageList" resultMap="XxlJobLog"> <select id="pageList" resultMap="XxlJobLog">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_log AS t FROM xxl_job_log
<trim prefix="WHERE" prefixOverrides="AND | OR" > <trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobId==0 and jobGroup gt 0"> <if test="jobId==0 and jobGroup gt 0">
AND t.job_group = #{jobGroup} AND job_group = #{jobGroup}
</if> </if>
<if test="jobId gt 0"> <if test="jobId gt 0">
AND t.job_id = #{jobId} AND job_id = #{jobId}
</if> </if>
<if test="triggerTimeStart != null"> <if test="triggerTimeStart != null">
AND t.trigger_time <![CDATA[ >= ]]> #{triggerTimeStart} AND trigger_time <![CDATA[ >= ]]> #{triggerTimeStart,jdbcType=TIMESTAMP}
</if> </if>
<if test="triggerTimeEnd != null"> <if test="triggerTimeEnd != null">
AND t.trigger_time <![CDATA[ <= ]]> #{triggerTimeEnd} AND trigger_time <![CDATA[ <= ]]> #{triggerTimeEnd,jdbcType=TIMESTAMP}
</if> </if>
<if test="logStatus == 1" > <if test="logStatus == 1" >
AND t.handle_code = 200 AND handle_code = 200
</if> </if>
<if test="logStatus == 2" > <if test="logStatus == 2" >
AND ( AND (
t.trigger_code NOT IN (0, 200) OR trigger_code NOT IN (0, 200) OR
t.handle_code NOT IN (0, 200) handle_code NOT IN (0, 200)
) )
</if> </if>
<if test="logStatus == 3" > <if test="logStatus == 3" >
AND t.trigger_code = 200 AND trigger_code = 200
AND t.handle_code = 0 AND handle_code = 0
</if> </if>
</trim> </trim>
ORDER BY t.trigger_time DESC ORDER BY trigger_time DESC
LIMIT #{offset}, #{pagesize}
</select> </select>
<select id="pageListCount" resultType="int"> <select id="pageListCount" resultType="int">
SELECT count(1) SELECT count(1)
FROM xxl_job_log AS t FROM xxl_job_log
<trim prefix="WHERE" prefixOverrides="AND | OR" > <trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobId==0 and jobGroup gt 0"> <if test="jobId==0 and jobGroup gt 0">
AND t.job_group = #{jobGroup} AND job_group = #{jobGroup}
</if> </if>
<if test="jobId gt 0"> <if test="jobId gt 0">
AND t.job_id = #{jobId} AND job_id = #{jobId}
</if> </if>
<if test="triggerTimeStart != null"> <if test="triggerTimeStart != null">
AND t.trigger_time <![CDATA[ >= ]]> #{triggerTimeStart} AND trigger_time <![CDATA[ >= ]]> #{triggerTimeStart,jdbcType=TIMESTAMP}
</if> </if>
<if test="triggerTimeEnd != null"> <if test="triggerTimeEnd != null">
AND t.trigger_time <![CDATA[ <= ]]> #{triggerTimeEnd} AND trigger_time <![CDATA[ <= ]]> #{triggerTimeEnd,jdbcType=TIMESTAMP}
</if> </if>
<if test="logStatus == 1" > <if test="logStatus == 1" >
AND t.handle_code = 200 AND handle_code = 200
</if> </if>
<if test="logStatus == 2" > <if test="logStatus == 2" >
AND ( AND (
t.trigger_code NOT IN (0, 200) OR trigger_code NOT IN (0, 200) OR
t.handle_code NOT IN (0, 200) handle_code NOT IN (0, 200)
) )
</if> </if>
<if test="logStatus == 3" > <if test="logStatus == 3" >
AND t.trigger_code = 200 AND trigger_code = 200
AND t.handle_code = 0 AND handle_code = 0
</if> </if>
</trim> </trim>
</select> </select>
<select id="load" parameterType="java.lang.Long" resultMap="XxlJobLog"> <select id="load" parameterType="java.lang.Long" resultMap="XxlJobLog">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_log AS t FROM xxl_job_log
WHERE t.id = #{id} WHERE id = #{id}
</select> </select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLog" useGeneratedKeys="true" keyProperty="id" > <insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLog" >
INSERT INTO xxl_job_log ( INSERT INTO xxl_job_log (
`job_group`, id,
`job_id`, job_group,
`trigger_time`, job_id,
`trigger_code`, trigger_time,
`handle_code` trigger_code,
handle_code,
executor_fail_retry_count,
alarm_status
) VALUES ( ) VALUES (
#{id},
#{jobGroup}, #{jobGroup},
#{jobId}, #{jobId},
#{triggerTime}, #{triggerTime,jdbcType=TIMESTAMP},
#{triggerCode}, #{triggerCode},
#{handleCode} #{handleCode},
); #{executorFailRetryCount},
#{alarmStatus}
)
<!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id"> <!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id">
SELECT LAST_INSERT_ID() SELECT LAST_INSERT_ID()
</selectKey>--> </selectKey>-->
@ -139,24 +144,24 @@
<update id="updateTriggerInfo" > <update id="updateTriggerInfo" >
UPDATE xxl_job_log UPDATE xxl_job_log
SET SET
`trigger_time`= #{triggerTime}, trigger_time= #{triggerTime,jdbcType=TIMESTAMP},
`trigger_code`= #{triggerCode}, trigger_code= #{triggerCode},
`trigger_msg`= #{triggerMsg}, trigger_msg= #{triggerMsg},
`executor_address`= #{executorAddress}, executor_address= #{executorAddress},
`executor_handler`=#{executorHandler}, executor_handler=#{executorHandler},
`executor_param`= #{executorParam}, executor_param= #{executorParam},
`executor_sharding_param`= #{executorShardingParam}, executor_sharding_param= #{executorShardingParam},
`executor_fail_retry_count`= #{executorFailRetryCount} executor_fail_retry_count= #{executorFailRetryCount}
WHERE `id`= #{id} WHERE id= #{id}
</update> </update>
<update id="updateHandleInfo"> <update id="updateHandleInfo">
UPDATE xxl_job_log UPDATE xxl_job_log
SET SET
`handle_time`= #{handleTime}, handle_time= #{handleTime,jdbcType=TIMESTAMP},
`handle_code`= #{handleCode}, handle_code= #{handleCode},
`handle_msg`= #{handleMsg} handle_msg= #{handleMsg}
WHERE `id`= #{id} WHERE id= #{id}
</update> </update>
<delete id="delete" > <delete id="delete" >
@ -197,26 +202,14 @@
<if test="clearBeforeTime != null"> <if test="clearBeforeTime != null">
AND trigger_time <![CDATA[ <= ]]> #{clearBeforeTime} AND trigger_time <![CDATA[ <= ]]> #{clearBeforeTime}
</if> </if>
<if test="clearBeforeNum gt 0"> <if test="recentLogIds != null and recentLogIds.size() gt 0">
AND id NOT in( AND id NOT in
SELECT id FROM( <foreach collection="recentLogIds" item="item" open="(" close=")" separator=",">
SELECT id FROM xxl_job_log AS t #{item}
<trim prefix="WHERE" prefixOverrides="AND | OR" > </foreach>
<if test="jobGroup gt 0">
AND t.job_group = #{jobGroup}
</if>
<if test="jobId gt 0">
AND t.job_id = #{jobId}
</if>
</trim>
ORDER BY t.trigger_time desc
LIMIT 0, #{clearBeforeNum}
) t1
)
</if> </if>
</trim> </trim>
order by id asc order by id asc
LIMIT #{pagesize}
</select> </select>
<delete id="clearLog" > <delete id="clearLog" >
@ -228,34 +221,46 @@
</delete> </delete>
<select id="findFailJobLogIds" resultType="long" > <select id="findFailJobLogIds" resultType="long" >
SELECT id FROM `xxl_job_log` SELECT id FROM xxl_job_log
WHERE !( WHERE not(
(trigger_code in (0, 200) and handle_code = 0) (trigger_code in (0, 200) and handle_code = 0)
OR OR
(handle_code = 200) (handle_code = 200)
) )
AND `alarm_status` = 0 AND alarm_status = 0
ORDER BY id ASC ORDER BY id ASC
LIMIT #{pagesize}
</select> </select>
<update id="updateAlarmStatus" > <update id="updateAlarmStatus" >
UPDATE xxl_job_log UPDATE xxl_job_log
SET SET
`alarm_status` = #{newAlarmStatus} alarm_status = #{newAlarmStatus}
WHERE `id`= #{logId} AND `alarm_status` = #{oldAlarmStatus} WHERE id= #{logId} AND alarm_status = #{oldAlarmStatus}
</update> </update>
<select id="findLostJobIds" resultType="long" > <select id="findLostJobIds" resultType="long" >
SELECT t.id SELECT id
FROM xxl_job_log AS t FROM xxl_job_log
WHERE t.trigger_code = 200 WHERE trigger_code = 200
and t.handle_code = 0 and handle_code = 0
and t.trigger_time <![CDATA[ <= ]]> #{losedTime} and trigger_time <![CDATA[ <= ]]> #{losedTime}
and t.executor_address not in ( and executor_address not in (
SELECT t2.registry_value SELECT xxl_job_registry.registry_value
FROM xxl_job_registry AS t2 FROM xxl_job_registry
) )
</select> </select>
<select id="findRecentLogs" resultType="long">
SELECT id FROM xxl_job_log t
<trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="jobGroup gt 0">
AND t.job_group = #{jobGroup}
</if>
<if test="jobId gt 0">
AND t.job_id = #{jobId}
</if>
</trim>
ORDER BY t.trigger_time DESC
</select>
</mapper> </mapper>

@ -12,25 +12,27 @@
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
t.id, id,
t.trigger_day, trigger_day,
t.running_count, running_count,
t.suc_count, suc_count,
t.fail_count fail_count
</sql> </sql>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLogReport" useGeneratedKeys="true" keyProperty="id" > <insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobLogReport">
INSERT INTO xxl_job_log_report ( INSERT INTO xxl_job_log_report (
`trigger_day`, id,
`running_count`, trigger_day,
`suc_count`, running_count,
`fail_count` suc_count,
fail_count
) VALUES ( ) VALUES (
#{triggerDay}, #{id},
#{triggerDay,jdbcType=TIMESTAMP},
#{runningCount}, #{runningCount},
#{sucCount}, #{sucCount},
#{failCount} #{failCount}
); )
<!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id"> <!--<selectKey resultType="java.lang.Integer" order="AFTER" keyProperty="id">
SELECT LAST_INSERT_ID() SELECT LAST_INSERT_ID()
</selectKey>--> </selectKey>-->
@ -38,17 +40,17 @@
<update id="update" > <update id="update" >
UPDATE xxl_job_log_report UPDATE xxl_job_log_report
SET `running_count` = #{runningCount}, SET running_count = #{runningCount},
`suc_count` = #{sucCount}, suc_count = #{sucCount},
`fail_count` = #{failCount} fail_count = #{failCount}
WHERE `trigger_day` = #{triggerDay} WHERE trigger_day = #{triggerDay,jdbcType=TIMESTAMP}
</update> </update>
<select id="queryLogReport" resultMap="XxlJobLogReport"> <select id="queryLogReport" resultMap="XxlJobLogReport">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_log_report AS t FROM xxl_job_log_report
WHERE t.trigger_day between #{triggerDayFrom} and #{triggerDayTo} WHERE trigger_day between #{triggerDayFrom} and #{triggerDayTo}
ORDER BY t.trigger_day ASC ORDER BY trigger_day ASC
</select> </select>
<select id="queryLogReportTotal" resultMap="XxlJobLogReport"> <select id="queryLogReportTotal" resultMap="XxlJobLogReport">
@ -56,7 +58,7 @@
SUM(running_count) running_count, SUM(running_count) running_count,
SUM(suc_count) suc_count, SUM(suc_count) suc_count,
SUM(fail_count) fail_count SUM(fail_count) fail_count
FROM xxl_job_log_report AS t FROM xxl_job_log_report
</select> </select>
</mapper> </mapper>
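Note: with the id columns no longer AUTO_INCREMENT (see the schema changes at the top of this merge), useGeneratedKeys/keyProperty is removed and the id is inserted explicitly, while the jdbcType=TIMESTAMP hints let the Oracle driver bind date parameters that may be null. A hedged sketch of the save call, assuming the id comes from the GenerateId bean referenced in the tests further down and that the usual XxlJobLogReportDao wiring is in place:

    XxlJobLogReport report = new XxlJobLogReport();
    report.setId(generateId.getId());        // application-generated id instead of AUTO_INCREMENT (assumed to be a long)
    report.setTriggerDay(triggerDay);
    report.setRunningCount(0);
    report.setSucCount(0);
    report.setFailCount(0);
    xxlJobLogReportDao.save(report);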

@ -0,0 +1,38 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.xxl.job.admin.dao.XxlJobMachineDao">
<resultMap id="XxlJobMachine" type="com.xxl.job.admin.core.model.XxlJobMachine" >
<result column="machine_ip" property="machineIp" />
<result column="machine_id" property="machineId" />
<result column="add_time" property="addTime" />
<result column="heart_last_time" property="heartLastTime" />
</resultMap>
<sql id="Base_Column_List">
machine_ip,machine_id,add_time,heart_last_time
</sql>
<select id="selectByHostIp" resultMap="XxlJobMachine">
SELECT machine_ip,machine_id,add_time,heart_last_time
FROM XXL_JOB_MACHINE
WHERE machine_ip = #{machineIp}
</select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobMachine" >
INSERT INTO xxl_job_machine ( machine_ip, machine_id, add_time, heart_last_time)
VALUES ( #{xxlJobMachine.machineIp}, #{xxlJobMachine.machineId}, #{xxlJobMachine.addTime,jdbcType=TIMESTAMP}, #{xxlJobMachine.heartLastTime,jdbcType=TIMESTAMP})
</insert>
<update id="update" parameterType="object" >
UPDATE xxl_job_machine
SET heart_last_time = #{heartLastTime,jdbcType=TIMESTAMP}
WHERE machine_ip = #{machineIp}
</update>
<select id="selectMaxMachineId" resultType="Integer">
SELECT MAX(machine_id) FROM xxl_job_machine
</select>
</mapper>
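The xxl_job_machine mapper above is new in this merge. It presumably backs the GenerateId component used in the tests below: each admin node registers its IP, claims a numeric machine_id and keeps a heartbeat, so ids can be generated snowflake-style without a database sequence. A rough sketch of that registration; every method signature beyond the mapper ids above is an assumption:

    XxlJobMachine machine = xxlJobMachineDao.selectByHostIp(localIp);
    if (machine == null) {
        Integer maxId = xxlJobMachineDao.selectMaxMachineId();      // null when the table is empty
        machine = new XxlJobMachine();
        machine.setMachineIp(localIp);
        machine.setMachineId(maxId == null ? 0 : maxId + 1);        // next free worker id (assumed allocation scheme)
        machine.setAddTime(new Date());
        machine.setHeartLastTime(new Date());
        xxlJobMachineDao.save(machine);
    } else {
        xxlJobMachineDao.update(localIp, new Date());               // refresh heart_last_time (assumed signature)
    }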

@ -12,20 +12,20 @@
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
t.id, id,
t.registry_group, registry_group,
t.registry_key, registry_key,
t.registry_value, registry_value,
t.update_time update_time
</sql> </sql>
<select id="findDead" parameterType="java.util.HashMap" resultType="java.lang.Integer" > <select id="findDead" parameterType="java.util.HashMap" resultType="java.lang.Long" >
SELECT t.id SELECT id
FROM xxl_job_registry AS t FROM xxl_job_registry
WHERE t.update_time <![CDATA[ < ]]> DATE_ADD(#{nowTime},INTERVAL -#{timeout} SECOND) WHERE update_time <![CDATA[ < ]]> #{date,jdbcType=TIMESTAMP}
</select> </select>
<delete id="removeDead" parameterType="java.lang.Integer" > <delete id="removeDead" parameterType="java.lang.Long" >
DELETE FROM xxl_job_registry DELETE FROM xxl_job_registry
WHERE id in WHERE id in
<foreach collection="ids" item="item" open="(" close=")" separator="," > <foreach collection="ids" item="item" open="(" close=")" separator="," >
@ -35,21 +35,21 @@
<select id="findAll" parameterType="java.util.HashMap" resultMap="XxlJobRegistry"> <select id="findAll" parameterType="java.util.HashMap" resultMap="XxlJobRegistry">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_registry AS t FROM xxl_job_registry
WHERE t.update_time <![CDATA[ > ]]> DATE_ADD(#{nowTime},INTERVAL -#{timeout} SECOND) WHERE update_time <![CDATA[ > ]]> #{date,jdbcType=TIMESTAMP}
</select> </select>
<update id="registryUpdate" > <update id="registryUpdate" >
UPDATE xxl_job_registry UPDATE xxl_job_registry
SET `update_time` = #{updateTime} SET update_time = #{updateTime,jdbcType=TIMESTAMP}
WHERE `registry_group` = #{registryGroup} WHERE registry_group = #{registryGroup}
AND `registry_key` = #{registryKey} AND registry_key = #{registryKey}
AND `registry_value` = #{registryValue} AND registry_value = #{registryValue}
</update> </update>
<insert id="registrySave" > <insert id="registrySave" >
INSERT INTO xxl_job_registry( `registry_group` , `registry_key` , `registry_value`, `update_time`) INSERT INTO xxl_job_registry( registry_group , registry_key , registry_value, update_time,id)
VALUES( #{registryGroup} , #{registryKey} , #{registryValue}, #{updateTime}) VALUES( #{registryGroup} , #{registryKey} , #{registryValue}, #{updateTime,jdbcType=TIMESTAMP},#{id})
</insert> </insert>
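Note on the registry changes: DATE_ADD(#{nowTime}, INTERVAL -#{timeout} SECOND) is MySQL syntax, so the cutoff is now computed in Java and passed as a single TIMESTAMP parameter, and registrySave carries an explicit id. A sketch of the caller side, assuming the usual dead-registry sweep; only findDead, removeDead, findAll and DateUtil.addSecond appear in this diff, the rest is illustrative:

    Date cutoff = DateUtil.addSecond(new Date(), -RegistryConfig.DEAD_TIMEOUT);   // e.g. 90 seconds ago
    List<Long> deadIds = xxlJobRegistryDao.findDead(cutoff);                      // resultType is now java.lang.Long
    if (deadIds != null && !deadIds.isEmpty()) {
        xxlJobRegistryDao.removeDead(deadIds);
    }
    List<XxlJobRegistry> alive = xxlJobRegistryDao.findAll(cutoff);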
<delete id="registryDelete" > <delete id="registryDelete" >

@ -12,59 +12,60 @@
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
t.id, id,
t.username, username,
t.password, password,
t.role, role,
t.permission permission
</sql> </sql>
<select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobUser"> <select id="pageList" parameterType="java.util.HashMap" resultMap="XxlJobUser">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_user AS t FROM xxl_job_user
<trim prefix="WHERE" prefixOverrides="AND | OR" > <trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="username != null and username != ''"> <if test="username != null and username != ''">
AND t.username like CONCAT(CONCAT('%', #{username}), '%') AND username like CONCAT(CONCAT('%', #{username}), '%')
</if> </if>
<if test="role gt -1"> <if test="role gt -1">
AND t.role = #{role} AND role = #{role}
</if> </if>
</trim> </trim>
ORDER BY username ASC ORDER BY username ASC
LIMIT #{offset}, #{pagesize}
</select> </select>
<select id="pageListCount" parameterType="java.util.HashMap" resultType="int"> <select id="pageListCount" parameterType="java.util.HashMap" resultType="int">
SELECT count(1) SELECT count(1)
FROM xxl_job_user AS t FROM xxl_job_user
<trim prefix="WHERE" prefixOverrides="AND | OR" > <trim prefix="WHERE" prefixOverrides="AND | OR" >
<if test="username != null and username != ''"> <if test="username != null and username != ''">
AND t.username like CONCAT(CONCAT('%', #{username}), '%') AND username like CONCAT(CONCAT('%', #{username}), '%')
</if> </if>
<if test="role gt -1"> <if test="role gt -1">
AND t.role = #{role} AND role = #{role}
</if> </if>
</trim> </trim>
</select> </select>
<select id="loadByUserName" parameterType="java.util.HashMap" resultMap="XxlJobUser"> <select id="loadByUserName" parameterType="java.util.HashMap" resultMap="XxlJobUser">
SELECT <include refid="Base_Column_List" /> SELECT <include refid="Base_Column_List" />
FROM xxl_job_user AS t FROM xxl_job_user
WHERE t.username = #{username} WHERE username = #{username}
</select> </select>
<insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobUser" useGeneratedKeys="true" keyProperty="id" > <insert id="save" parameterType="com.xxl.job.admin.core.model.XxlJobUser" >
INSERT INTO xxl_job_user ( INSERT INTO xxl_job_user (
id,
username, username,
password, password,
role, role,
permission permission
) VALUES ( ) VALUES (
#{id},
#{username}, #{username},
#{password}, #{password},
#{role}, #{role},
#{permission} #{permission}
); )
</insert> </insert>
<update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobUser" > <update id="update" parameterType="com.xxl.job.admin.core.model.XxlJobUser" >

@ -19,8 +19,8 @@ public class XxlJobInfoDaoTest {
@Test @Test
public void pageList(){ public void pageList(){
List<XxlJobInfo> list = xxlJobInfoDao.pageList(0, 20, 0, -1, null, null, null); List<XxlJobInfo> list = xxlJobInfoDao.pageList(0, -1, null, null, null);
int list_count = xxlJobInfoDao.pageListCount(0, 20, 0, -1, null, null, null); int list_count = xxlJobInfoDao.pageListCount(0, -1, null, null, null);
System.out.println(list); System.out.println(list);
System.out.println(list_count); System.out.println(list_count);
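Note: the offset/pagesize arguments are gone from pageList and pageListCount; paging is expected to be applied by the caller through PageHelper, as the log-dao test below does. A hedged sketch (PageInfo is part of the PageHelper library; the dao argument list is taken from the test above):

    PageHelper.startPage(pageNum, pageSize);                          // replaces the removed LIMIT #{offset}, #{pagesize}
    List<XxlJobInfo> page = xxlJobInfoDao.pageList(0, -1, null, null, null);
    PageInfo<XxlJobInfo> pageInfo = new PageInfo<XxlJobInfo>(page);
    long total = pageInfo.getTotal();                                 // could replace the separate pageListCount round-trip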

@ -1,5 +1,6 @@
package com.xxl.job.admin.dao; package com.xxl.job.admin.dao;
import com.github.pagehelper.PageHelper;
import com.xxl.job.admin.core.model.XxlJobLog; import com.xxl.job.admin.core.model.XxlJobLog;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
@ -19,8 +20,9 @@ public class XxlJobLogDaoTest {
@Test @Test
public void test(){ public void test(){
List<XxlJobLog> list = xxlJobLogDao.pageList(0, 10, 1, 1, null, null, 1); PageHelper.startPage(1,10);
int list_count = xxlJobLogDao.pageListCount(0, 10, 1, 1, null, null, 1); List<XxlJobLog> list = xxlJobLogDao.pageList(1, 1, null, null, 1);
int list_count = xxlJobLogDao.pageListCount(1, 1, null, null, 1);
XxlJobLog log = new XxlJobLog(); XxlJobLog log = new XxlJobLog();
log.setJobGroup(1); log.setJobGroup(1);
@ -46,7 +48,7 @@ public class XxlJobLogDaoTest {
dto = xxlJobLogDao.load(log.getId()); dto = xxlJobLogDao.load(log.getId());
List<Long> ret4 = xxlJobLogDao.findClearLogIds(1, 1, new Date(), 100, 100); List<Long> ret4 = xxlJobLogDao.findClearLogIds(1, 1, new Date(), null);
int ret2 = xxlJobLogDao.delete(log.getJobId()); int ret2 = xxlJobLogDao.delete(log.getJobId());

@ -1,5 +1,6 @@
package com.xxl.job.admin.dao; package com.xxl.job.admin.dao;
import com.github.pagehelper.PageHelper;
import com.xxl.job.admin.core.model.XxlJobLogGlue; import com.xxl.job.admin.core.model.XxlJobLogGlue;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
@ -31,7 +32,9 @@ public class XxlJobLogGlueDaoTest {
List<XxlJobLogGlue> list = xxlJobLogGlueDao.findByJobId(1); List<XxlJobLogGlue> list = xxlJobLogGlueDao.findByJobId(1);
int ret2 = xxlJobLogGlueDao.removeOld(1, 1); PageHelper.startPage(1,30);
List<Long> ids = xxlJobLogGlueDao.findIds(1);
int ret2 = xxlJobLogGlueDao.removeOld(1, ids);
int ret3 =xxlJobLogGlueDao.deleteByJobId(1); int ret3 =xxlJobLogGlueDao.deleteByJobId(1);
} }
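Note: removeOld no longer takes a row limit; the caller first pages the GLUE-history ids with PageHelper and passes the resulting id list, as the test above shows. Whether the listed ids are the ones kept or the ones deleted is not visible in this diff, so the sketch below only restates the calling pattern:

    PageHelper.startPage(1, 30);                          // newest 30 GLUE versions for this job
    List<Long> ids = xxlJobLogGlueDao.findIds(jobId);
    int removed = xxlJobLogGlueDao.removeOld(jobId, ids);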

@ -1,6 +1,8 @@
package com.xxl.job.admin.dao; package com.xxl.job.admin.dao;
import com.xxl.job.admin.core.id.GenerateId;
import com.xxl.job.admin.core.model.XxlJobRegistry; import com.xxl.job.admin.core.model.XxlJobRegistry;
import com.xxl.job.core.util.DateUtil;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.context.SpringBootTest;
@ -17,17 +19,20 @@ public class XxlJobRegistryDaoTest {
@Resource @Resource
private XxlJobRegistryDao xxlJobRegistryDao; private XxlJobRegistryDao xxlJobRegistryDao;
@Resource
private GenerateId generateId;
@Test @Test
public void test(){ public void test(){
int ret = xxlJobRegistryDao.registryUpdate("g1", "k1", "v1", new Date()); int ret = xxlJobRegistryDao.registryUpdate("g1", "k1", "v1", new Date());
if (ret < 1) { if (ret < 1) {
ret = xxlJobRegistryDao.registrySave("g1", "k1", "v1", new Date()); ret = xxlJobRegistryDao.registrySave("g1", "k1", "v1", new Date(), generateId.getId());
} }
List<XxlJobRegistry> list = xxlJobRegistryDao.findAll(1, new Date()); Date time = DateUtil.addSecond(new Date(),1);
List<XxlJobRegistry> list = xxlJobRegistryDao.findAll(time);
int ret2 = xxlJobRegistryDao.removeDead(Arrays.asList(1)); int ret2 = xxlJobRegistryDao.removeDead(Arrays.asList(1L));
} }
} }

@ -36,7 +36,7 @@ public class ExecutorBizTest {
public void idleBeat(){ public void idleBeat(){
ExecutorBiz executorBiz = new ExecutorBizClient(addressUrl, accessToken); ExecutorBiz executorBiz = new ExecutorBizClient(addressUrl, accessToken);
final int jobId = 0; final long jobId = 0;
// Act // Act
final ReturnT<String> retval = executorBiz.idleBeat(new IdleBeatParam(jobId)); final ReturnT<String> retval = executorBiz.idleBeat(new IdleBeatParam(jobId));
@ -75,7 +75,7 @@ public class ExecutorBizTest {
public void kill(){ public void kill(){
ExecutorBiz executorBiz = new ExecutorBizClient(addressUrl, accessToken); ExecutorBiz executorBiz = new ExecutorBizClient(addressUrl, accessToken);
final int jobId = 0; final long jobId = 0;
// Act // Act
final ReturnT<String> retval = executorBiz.kill(new KillParam(jobId)); final ReturnT<String> retval = executorBiz.kill(new KillParam(jobId));

@ -72,6 +72,13 @@
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<!--PageHelper -->
<dependency>
<groupId>com.github.pagehelper</groupId>
<artifactId>pagehelper-spring-boot-starter</artifactId>
<version>${mybatis.pagehelper.version}</version>
</dependency>
</dependencies> </dependencies>
</project> </project>

@ -10,18 +10,18 @@ public class IdleBeatParam implements Serializable {
public IdleBeatParam() { public IdleBeatParam() {
} }
public IdleBeatParam(int jobId) { public IdleBeatParam(long jobId) {
this.jobId = jobId; this.jobId = jobId;
} }
private int jobId; private long jobId;
public int getJobId() { public long getJobId() {
return jobId; return jobId;
} }
public void setJobId(int jobId) { public void setJobId(long jobId) {
this.jobId = jobId; this.jobId = jobId;
} }

@ -10,18 +10,18 @@ public class KillParam implements Serializable {
public KillParam() { public KillParam() {
} }
public KillParam(int jobId) { public KillParam(long jobId) {
this.jobId = jobId; this.jobId = jobId;
} }
private int jobId; private long jobId;
public int getJobId() { public long getJobId() {
return jobId; return jobId;
} }
public void setJobId(int jobId) { public void setJobId(long jobId) {
this.jobId = jobId; this.jobId = jobId;
} }

@ -8,7 +8,7 @@ import java.io.Serializable;
public class TriggerParam implements Serializable{ public class TriggerParam implements Serializable{
private static final long serialVersionUID = 42L; private static final long serialVersionUID = 42L;
private int jobId; private long jobId;
private String executorHandler; private String executorHandler;
private String executorParams; private String executorParams;
@ -26,11 +26,11 @@ public class TriggerParam implements Serializable{
private int broadcastTotal; private int broadcastTotal;
public int getJobId() { public long getJobId() {
return jobId; return jobId;
} }
public void setJobId(int jobId) { public void setJobId(long jobId) {
this.jobId = jobId; this.jobId = jobId;
} }

@ -86,7 +86,7 @@ public class XxlJobExecutor {
// destory jobThreadRepository // destory jobThreadRepository
if (jobThreadRepository.size() > 0) { if (jobThreadRepository.size() > 0) {
for (Map.Entry<Integer, JobThread> item: jobThreadRepository.entrySet()) { for (Map.Entry<Long, JobThread> item: jobThreadRepository.entrySet()) {
JobThread oldJobThread = removeJobThread(item.getKey(), "web container destroy and kill the job."); JobThread oldJobThread = removeJobThread(item.getKey(), "web container destroy and kill the job.");
// wait for job thread push result to callback queue // wait for job thread push result to callback queue
if (oldJobThread != null) { if (oldJobThread != null) {
@ -174,8 +174,8 @@ public class XxlJobExecutor {
// ---------------------- job thread repository ---------------------- // ---------------------- job thread repository ----------------------
private static ConcurrentMap<Integer, JobThread> jobThreadRepository = new ConcurrentHashMap<Integer, JobThread>(); private static ConcurrentMap<Long, JobThread> jobThreadRepository = new ConcurrentHashMap<Long, JobThread>();
public static JobThread registJobThread(int jobId, IJobHandler handler, String removeOldReason){ public static JobThread registJobThread(long jobId, IJobHandler handler, String removeOldReason){
JobThread newJobThread = new JobThread(jobId, handler); JobThread newJobThread = new JobThread(jobId, handler);
newJobThread.start(); newJobThread.start();
logger.info(">>>>>>>>>>> xxl-job regist JobThread success, jobId:{}, handler:{}", new Object[]{jobId, handler}); logger.info(">>>>>>>>>>> xxl-job regist JobThread success, jobId:{}, handler:{}", new Object[]{jobId, handler});
@ -188,7 +188,7 @@ public class XxlJobExecutor {
return newJobThread; return newJobThread;
} }
public static JobThread removeJobThread(int jobId, String removeOldReason){ public static JobThread removeJobThread(long jobId, String removeOldReason){
JobThread oldJobThread = jobThreadRepository.remove(jobId); JobThread oldJobThread = jobThreadRepository.remove(jobId);
if (oldJobThread != null) { if (oldJobThread != null) {
oldJobThread.toStop(removeOldReason); oldJobThread.toStop(removeOldReason);
@ -198,7 +198,7 @@ public class XxlJobExecutor {
} }
return null; return null;
} }
public static JobThread loadJobThread(int jobId){ public static JobThread loadJobThread(long jobId){
JobThread jobThread = jobThreadRepository.get(jobId); JobThread jobThread = jobThreadRepository.get(jobId);
return jobThread; return jobThread;
} }
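Note on the int-to-long migration running through the executor classes above and below: with the database ids widened to bigint and generated in the application, every jobId field, parameter and map key moves from int/Integer to long/Long. Map keys are the easy place to slip up, because an autoboxed Integer key never equals a Long key; an illustrative, non-project snippet:

    ConcurrentMap<Long, JobThread> repo = new ConcurrentHashMap<Long, JobThread>();
    repo.put(1L, jobThread);
    JobThread miss = repo.get(1);     // null: Integer.valueOf(1) does not equal Long.valueOf(1L)
    JobThread hit  = repo.get(1L);    // found, matching loadJobThread(long jobId) above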

@ -15,12 +15,12 @@ import java.io.File;
*/ */
public class ScriptJobHandler extends IJobHandler { public class ScriptJobHandler extends IJobHandler {
private int jobId; private long jobId;
private long glueUpdatetime; private long glueUpdatetime;
private String gluesource; private String gluesource;
private GlueTypeEnum glueType; private GlueTypeEnum glueType;
public ScriptJobHandler(int jobId, long glueUpdatetime, String gluesource, GlueTypeEnum glueType){ public ScriptJobHandler(long jobId, long glueUpdatetime, String gluesource, GlueTypeEnum glueType){
this.jobId = jobId; this.jobId = jobId;
this.glueUpdatetime = glueUpdatetime; this.glueUpdatetime = glueUpdatetime;
this.gluesource = gluesource; this.gluesource = gluesource;

@ -27,7 +27,7 @@ import java.util.concurrent.*;
public class JobThread extends Thread{ public class JobThread extends Thread{
private static Logger logger = LoggerFactory.getLogger(JobThread.class); private static Logger logger = LoggerFactory.getLogger(JobThread.class);
private int jobId; private long jobId;
private IJobHandler handler; private IJobHandler handler;
private LinkedBlockingQueue<TriggerParam> triggerQueue; private LinkedBlockingQueue<TriggerParam> triggerQueue;
private Set<Long> triggerLogIdSet; // avoid repeat trigger for the same TRIGGER_LOG_ID private Set<Long> triggerLogIdSet; // avoid repeat trigger for the same TRIGGER_LOG_ID
@ -39,7 +39,7 @@ public class JobThread extends Thread{
private int idleTimes = 0; // idel times private int idleTimes = 0; // idel times
public JobThread(int jobId, IJobHandler handler) { public JobThread(long jobId, IJobHandler handler) {
this.jobId = jobId; this.jobId = jobId;
this.handler = handler; this.handler = handler;
this.triggerQueue = new LinkedBlockingQueue<TriggerParam>(); this.triggerQueue = new LinkedBlockingQueue<TriggerParam>();

@ -143,6 +143,10 @@ public class DateUtil {
return add(date, Calendar.MINUTE, amount); return add(date, Calendar.MINUTE, amount);
} }
public static Date addSecond(final Date date, final int amount) {
return add(date, Calendar.SECOND, amount);
}
private static Date add(final Date date, final int calendarField, final int amount) { private static Date add(final Date date, final int calendarField, final int amount) {
if (date == null) { if (date == null) {
return null; return null;
