Integrate the PowerJob distributed task scheduling framework

parent 216a9d196a
commit 1167bede79

pom.xml (48 changed lines)
@@ -13,19 +13,18 @@
 <description>Ruoyi-Flex管理系统</description>

 <properties>
-<revision>4.1.5</revision>
+<revision>4.1.6-SNAPSHOT</revision>
 <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
 <java.version>17</java.version>
-<spring-boot.version>3.1.2</spring-boot.version>
+<spring-boot.version>3.1.3</spring-boot.version>
-<mybatis-flex.version>1.5.7</mybatis-flex.version>
+<mybatis-flex.version>1.5.8</mybatis-flex.version>
 <satoken.version>1.35.0.RC</satoken.version>
 <mysql.version>8.0.33</mysql.version>
 <maven-jar-plugin.version>3.1.1</maven-jar-plugin.version>
 <HikariCP.version>5.0.1</HikariCP.version>
 <bitwalker.version>1.21</bitwalker.version>
 <kaptcha.version>2.3.3</kaptcha.version>
-<pagehelper.boot.version>1.4.6</pagehelper.boot.version>
 <pagehelper.version>5.3.3</pagehelper.version>
 <fastjson.version>2.0.34</fastjson.version>
 <oshi.version>6.4.4</oshi.version>
@@ -38,17 +37,19 @@
 <servlet-api.version>6.0.0</servlet-api.version>
 <guava.version>32.1.1-jre</guava.version>
 <flatten-maven-plugin.version>1.5.0</flatten-maven-plugin.version>
-<springdoc.version>2.1.0</springdoc.version>
+<springdoc.version>2.2.0</springdoc.version>
 <springdoc-openapi-starter-common.version>2.1.0</springdoc-openapi-starter-common.version>
 <therapi-runtime-javadoc.version>0.15.0</therapi-runtime-javadoc.version>
+<snakeyaml.version>1.33</snakeyaml.version>
 <lombok.version>1.18.28</lombok.version>
 <mapstruct-plus.version>1.3.5</mapstruct-plus.version>
 <mapstruct-plus.lombok.version>0.2.0</mapstruct-plus.lombok.version>
 <hutool.version>5.8.21</hutool.version>
-<redisson.version>3.23.2</redisson.version>
+<redisson.version>3.23.3</redisson.version>
 <lock4j.version>2.2.4</lock4j.version>
 <alibaba-ttl.version>2.14.3</alibaba-ttl.version>
-<spring-boot-admin.version>3.1.4</spring-boot-admin.version>
+<spring-boot-admin.version>3.1.5</spring-boot-admin.version>
+<powerjob.version>4.3.5</powerjob.version>

 <!-- 插件版本 -->
 <maven-jar-plugin.version>3.2.2</maven-jar-plugin.version>
@@ -175,11 +176,6 @@
 </dependency>

 <!-- pagehelper 分页插件 -->
-<dependency>
-<groupId>com.github.pagehelper</groupId>
-<artifactId>pagehelper-spring-boot-starter</artifactId>
-<version>${pagehelper.boot.version}</version>
-</dependency>
 <dependency>
 <groupId>com.github.pagehelper</groupId>
 <artifactId>pagehelper</artifactId>
@@ -305,6 +301,13 @@
 <version>${lombok.version}</version>
 </dependency>

+<!-- 临时修复 snakeyaml 漏洞 -->
+<dependency>
+<groupId>org.yaml</groupId>
+<artifactId>snakeyaml</artifactId>
+<version>${snakeyaml.version}</version>
+</dependency>

 <!--redisson-->
 <dependency>
 <groupId>org.redisson</groupId>
@@ -343,10 +346,29 @@
 <version>${spring-boot-admin.version}</version>
 </dependency>

+<!-- PowerJob -->
+<dependency>
+<groupId>tech.powerjob</groupId>
+<artifactId>powerjob-worker-spring-boot-starter</artifactId>
+<version>${powerjob.version}</version>
+</dependency>
+<dependency>
+<groupId>tech.powerjob</groupId>
+<artifactId>powerjob-official-processors</artifactId>
+<version>${powerjob.version}</version>
+</dependency>

 <!-- 定时任务-->
+<!-- <dependency>-->
+<!-- <groupId>com.ruoyi</groupId>-->
+<!-- <artifactId>ruoyi-quartz</artifactId>-->
+<!-- <version>${revision}</version>-->
+<!-- </dependency>-->

+<!-- PowerJob定时任务处理器-->
 <dependency>
 <groupId>com.ruoyi</groupId>
-<artifactId>ruoyi-quartz</artifactId>
+<artifactId>ruoyi-job</artifactId>
 <version>${revision}</version>
 </dependency>
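Note on the two new tech.powerjob artifacts above: powerjob-worker-spring-boot-starter embeds the PowerJob worker runtime in the application, and powerjob-official-processors ships ready-made utility processors (script, HTTP, SQL and similar). With the starter on the classpath, any Spring bean implementing the worker's processor SDK can be scheduled from the PowerJob console by its fully qualified class name. The commit adds full demo processors further down; the minimal sketch below (class and package names are illustrative, not part of the commit) only shows the core contract.

// Minimal single-machine processor sketch; the real demos appear later in this diff.
package com.ruoyi.job.processors;

import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;

@Component
public class HelloPowerJobProcessor implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // Parameters configured on the console arrive through the TaskContext;
        // OmsLogger output is shipped back to the server and shown in the console UI.
        context.getOmsLogger().info("hello powerjob, params: {}", context.getJobParams());
        return new ProcessResult(true, "done");
    }
}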
@@ -37,12 +37,6 @@
 <artifactId>postgresql</artifactId>
 </dependency>

-<!-- mybatis-flex -->
-<dependency>
-<groupId>com.mybatis-flex</groupId>
-<artifactId>mybatis-flex-spring-boot-starter</artifactId>
-</dependency>

 <!-- system模块-->
 <dependency>
 <groupId>com.ruoyi</groupId>
@@ -50,9 +44,15 @@
 </dependency>

 <!-- 定时任务-->
+<!-- <dependency>-->
+<!-- <groupId>com.ruoyi</groupId>-->
+<!-- <artifactId>ruoyi-quartz</artifactId>-->
+<!-- </dependency>-->

+<!-- PowerJob定时任务处理器-->
 <dependency>
 <groupId>com.ruoyi</groupId>
-<artifactId>ruoyi-quartz</artifactId>
+<artifactId>ruoyi-job</artifactId>
 </dependency>

 <!-- 代码生成-->
@@ -73,6 +73,12 @@
 <artifactId>spring-boot-admin-starter-client</artifactId>
 </dependency>

+<!-- powerjob 客户端 -->
+<!-- <dependency>-->
+<!-- <groupId>tech.powerjob</groupId>-->
+<!-- <artifactId>powerjob-worker-spring-boot-starter</artifactId>-->
+<!-- </dependency>-->

 </dependencies>

 <build>
@@ -81,16 +87,13 @@
 <groupId>org.springframework.boot</groupId>
 <artifactId>spring-boot-maven-plugin</artifactId>
 <version>${spring-boot.version}</version>
-<configuration>
-<!-- <fork>true</fork> <!– 如果没有该配置,devtools不会生效 –>-->
-</configuration>
-<!-- <executions>-->
-<!-- <execution>-->
-<!-- <goals>-->
-<!-- <goal>repackage</goal>-->
-<!-- </goals>-->
-<!-- </execution>-->
-<!-- </executions>-->
+<executions>
+<execution>
+<goals>
+<goal>repackage</goal>
+</goals>
+</execution>
+</executions>
 </plugin>
 <plugin>
 <groupId>org.apache.maven.plugins</groupId>
@@ -1,2 +1,4 @@
 restart.include.json=/com.alibaba.fastjson.*.jar
-restart.include.mybatis-flex=/mybatis-flex.*.jar
+restart.include.mapper=/mapper-[\\w-\\.].jar
+restart.include.pagehelper=/pagehelper-[\\w-\\.].jar
+restart.include.mybatis-flex=/mybatis-flex-[\\w-\\.]+jar
@@ -5,10 +5,7 @@ mybatis-flex:
   # 不支持多包, 如有需要可在注解配置 或 提升扫包等级:com.**.**.mapper
   mapperPackage: com.ruoyi.**.mapper
   # 配置mapper的扫描,找到所有的mapper.xml映射文件
-  mapper-locations: classpath*:mapper/**/*Mapper.xml
+  mapperLocations: classpath*:mapper/**/*Mapper.xml
-  cacheEnabled: true
-  useGeneratedKeys: true
-  defaultExecutorType: SIMPLE

   #本部分(Configuration)的配置都为 MyBatis 原生支持的配置,有关配置请参考:https://mybatis.org/mybatis-3/zh/configuration.html#%E8%AE%BE%E7%BD%AE%EF%BC%88settings%EF%BC%89
   configuration:
@@ -24,17 +21,19 @@ mybatis-flex:
     # 关闭日志记录 (可单纯使用 p6spy 分析) org.apache.ibatis.logging.nologging.NoLoggingImpl
     # 默认日志输出 org.apache.ibatis.logging.slf4j.Slf4jImpl
     logImpl: org.apache.ibatis.logging.slf4j.Slf4jImpl
+    cacheEnabled: true
+    useGeneratedKeys: true
+    defaultExecutorType: SIMPLE

   # MyBatis-Flex全局配置
   global-config:
     # 是否控制台打印 MyBatis-Flex 的 LOGO 及版本号
-    print-banner: false
+    print-banner: true
     # 逻辑删除数据存在标记值
     normal-value-of-logic-delete: 0
     # 逻辑删除数据存在标记值
     deleted-value-of-logic-delete: 1


   datasource:
     # 数据源-1
     PrimaryDS:
@@ -126,3 +125,21 @@ spring.boot.admin.client:
   service-host-type: IP
   username: ruoyi
   password: 123456

+--- # powerjob 配置
+powerjob:
+  worker:
+    # 如何开启调度中心请查看文档教程
+    enabled: true
+    # 需要先在 powerjob 登录页执行应用注册后才能使用
+    app-name: ruoyi-worker
+    # 28080 端口 随着主应用端口飘逸 避免集群冲突
+    port: 2${server.port}
+    protocol: http
+    server-address: 127.0.0.1:7700
+    store-strategy: disk
+    enable-test-mode: false
+    max-appended-wf-context-length: 4096
+    max-result-length: 4096
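In the dev profile the worker registers with the scheduling server at 127.0.0.1:7700 under the app name ruoyi-worker, and its own port is derived by prefixing 2 to the main server.port (e.g. 8080 becomes 28080) so clustered instances do not collide. Jobs are normally created and triggered from the PowerJob console; they can also be driven from code through PowerJob's OpenAPI client. The sketch below is only an assumption-based illustration: it relies on the separate tech.powerjob:powerjob-client artifact (which this commit does not add), on the client API as documented upstream, and on a job id and app password taken from the console.

import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.response.ResultDTO;

public class TriggerJobExample {

    public static void main(String[] args) {
        // Server address, app name and app password must match the registration done in the console (values here are placeholders).
        PowerJobClient client = new PowerJobClient("127.0.0.1:7700", "ruoyi-worker", "123456");

        // Run job #1 immediately (delay = 0 ms) with ad-hoc instance parameters; the returned data is the instance id.
        ResultDTO<Long> result = client.runJob(1L, "{\"traceId\":\"demo\"}", 0);
        System.out.println(result.isSuccess() ? "instanceId = " + result.getData() : "failed: " + result.getMessage());
    }
}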
@@ -129,3 +129,19 @@ spring.boot.admin.client:
   username: ruoyi
   password: 123456

+--- # powerjob 配置
+powerjob:
+  worker:
+    # 如何开启调度中心请查看文档教程
+    enabled: true
+    # 需要先在 powerjob 登录页执行应用注册后才能使用
+    app-name: ruoyi-worker
+    enable-test-mode: false
+    max-appended-wf-context-length: 4096
+    max-result-length: 4096
+    # 28080 端口 随着主应用端口飘逸 避免集群冲突
+    port: 2${server.port}
+    protocol: http
+    server-address: 127.0.0.1:7700
+    store-strategy: disk
@@ -53,6 +53,7 @@ logging:
   level:
     com.ruoyi: debug
     org.springframework: warn
+    tech.powerjob.worker.background: warn
   config: classpath:logback.xml

 # 用户配置
@@ -14,6 +14,7 @@
 <module>ruoyi-common-bom</module>
 <module>ruoyi-common-core</module>
 <module>ruoyi-common-excel</module>
+<module>ruoyi-common-job</module>
 <module>ruoyi-common-json</module>
 <module>ruoyi-common-log</module>
 <module>ruoyi-common-orm</module>
@@ -14,7 +14,7 @@
 </description>

 <properties>
-<revision>4.1.5</revision>
+<revision>4.1.6-SNAPSHOT</revision>
 </properties>

 <dependencyManagement>
@@ -33,6 +33,13 @@
 <version>${revision}</version>
 </dependency>

+<!-- 定时任务模块 -->
+<dependency>
+<groupId>com.ruoyi</groupId>
+<artifactId>ruoyi-common-job</artifactId>
+<version>${revision}</version>
+</dependency>

 <!-- 序列化模块 -->
 <dependency>
 <groupId>com.ruoyi</groupId>
@@ -144,10 +144,6 @@
 </dependency>

 <!-- pagehelper 分页插件 -->
-<dependency>
-<groupId>com.github.pagehelper</groupId>
-<artifactId>pagehelper-spring-boot-starter</artifactId>
-</dependency>
 <dependency>
 <groupId>com.github.pagehelper</groupId>
 <artifactId>pagehelper</artifactId>
ruoyi-common/ruoyi-common-job/pom.xml (new file, 46 lines)
@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xmlns="http://maven.apache.org/POM/4.0.0"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>com.ruoyi</groupId>
        <artifactId>ruoyi-common</artifactId>
        <version>${revision}</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>ruoyi-common-job</artifactId>

    <description>
        ruoyi-common-job 定时任务
    </description>

    <dependencies>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-autoconfigure</artifactId>
        </dependency>

        <!--PowerJob-->
        <dependency>
            <groupId>tech.powerjob</groupId>
            <artifactId>powerjob-worker-spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>tech.powerjob</groupId>
            <artifactId>powerjob-official-processors</artifactId>
        </dependency>

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>

        <dependency>
            <groupId>com.ruoyi</groupId>
            <artifactId>ruoyi-common-core</artifactId>
        </dependency>

    </dependencies>
</project>
@@ -0,0 +1,21 @@ (new file)
package com.ruoyi.common.job.config;

import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
import tech.powerjob.worker.PowerJobWorker;

/**
 * 启动定时任务
 * @author yhan219
 * @since 2023/6/2
 */
@Configuration
@ConditionalOnBean(PowerJobWorker.class)
@ConditionalOnProperty(prefix = "powerjob.worker", name = "enabled", havingValue = "true")
@EnableScheduling
public class PowerJobConfig {

}
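PowerJobConfig only takes effect when the starter has auto-configured a PowerJobWorker bean and powerjob.worker.enabled=true; otherwise scheduling stays off. A minimal test-side sketch of that gating follows, assuming spring-boot-test, JUnit 5 and AssertJ are on the test classpath (none of which this commit adds).

package com.ruoyi.common.job.config;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;

class PowerJobConfigConditionTest {

    private final ApplicationContextRunner runner = new ApplicationContextRunner()
            .withUserConfiguration(PowerJobConfig.class);

    @Test
    void configIsSkippedWhenWorkerIsDisabled() {
        // With the property off (and no PowerJobWorker bean registered) the configuration class must not load.
        runner.withPropertyValues("powerjob.worker.enabled=false")
              .run(context -> assertThat(context).doesNotHaveBean(PowerJobConfig.class));
    }
}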
@@ -58,61 +58,4 @@ public class MyBatisFlexConfig {

 //TODO:多租户配置

-// @Bean
-// public MybatisPlusInterceptor mybatisPlusInterceptor() {
-// MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
-// // 数据权限处理
-// interceptor.addInnerInterceptor(dataPermissionInterceptor());
-// // 分页插件
-// interceptor.addInnerInterceptor(paginationInnerInterceptor());
-// // 乐观锁插件
-// interceptor.addInnerInterceptor(optimisticLockerInnerInterceptor());
-// return interceptor;
-// }
-//
-// /**
-// * 数据权限拦截器
-// */
-// public PlusDataPermissionInterceptor dataPermissionInterceptor() {
-// return new PlusDataPermissionInterceptor();
-// }
-//
-// /**
-// * 分页插件,自动识别数据库类型
-// */
-// public PaginationInnerInterceptor paginationInnerInterceptor() {
-// PaginationInnerInterceptor paginationInnerInterceptor = new PaginationInnerInterceptor();
-// // 设置最大单页限制数量,默认 500 条,-1 不受限制
-// paginationInnerInterceptor.setMaxLimit(-1L);
-// // 分页合理化
-// paginationInnerInterceptor.setOverflow(true);
-// return paginationInnerInterceptor;
-// }
-//
-// /**
-// * 乐观锁插件
-// */
-// public OptimisticLockerInnerInterceptor optimisticLockerInnerInterceptor() {
-// return new OptimisticLockerInnerInterceptor();
-// }
-//
-// /**
-// * 元对象字段填充控制器
-// */
-// @Bean
-// public MetaObjectHandler metaObjectHandler() {
-// return new InjectionMetaObjectHandler();
-// }
-//
-// /**
-// * 使用网卡信息绑定雪花生成器
-// * 防止集群雪花ID重复
-// */
-// @Bean
-// public IdentifierGenerator idGenerator() {
-// return new DefaultIdentifierGenerator(NetUtil.getLocalhost());
-// }

 }
@@ -22,7 +22,7 @@ public class MyBatisFlexInitConfig implements MyBatisFlexCustomizer {
 FlexGlobalConfig.KeyConfig keyConfig = new FlexGlobalConfig.KeyConfig();
 keyConfig.setKeyType(KeyType.Generator);
 keyConfig.setValue(KeyGenerators.snowFlakeId);
-//keyConfig.setBefore(true);
+keyConfig.setBefore(true);
 globalConfig.setKeyConfig(keyConfig);
 }
 }
@@ -14,6 +14,7 @@

 <modules>
 <module>ruoyi-monitor</module>
+<module>ruoyi-powerjob-server</module>
 </modules>

 </project>
@@ -1,13 +0,0 @@ (deleted file)
-FROM findepi/graalvm:java17-native
-
-MAINTAINER Lion Li
-
-RUN mkdir -p /ruoyi/monitor/logs
-
-WORKDIR /ruoyi/monitor
-
-EXPOSE 9090
-
-ADD ./target/ruoyi-monitor-admin.jar ./app.jar
-
-ENTRYPOINT ["java", "-Djava.security.egd=file:/dev/./urandom", "-jar", "app.jar"]
@@ -63,9 +63,6 @@
 <groupId>org.springframework.boot</groupId>
 <artifactId>spring-boot-maven-plugin</artifactId>
 <version>${spring-boot.version}</version>
-<configuration>
-<!-- <fork>true</fork> <!– 如果没有该配置,devtools不会生效 –>-->
-</configuration>
 <executions>
 <execution>
 <goals>
ruoyi-extra/ruoyi-powerjob-server/pom.xml (new file, 76 lines)
@@ -0,0 +1,76 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
    <parent>
        <groupId>com.ruoyi</groupId>
        <artifactId>ruoyi-extra</artifactId>
        <version>${revision}</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <packaging>jar</packaging>
    <artifactId>ruoyi-powerjob-server</artifactId>

    <properties>
        <spring-boot.version>2.7.14</spring-boot.version>
        <spring-boot-admin.version>2.7.10</spring-boot-admin.version>
    </properties>

    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-parent</artifactId>
                <version>${spring-boot.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <dependencies>

        <!-- PowerJob server-->
        <dependency>
            <groupId>tech.powerjob</groupId>
            <artifactId>powerjob-server-starter</artifactId>
            <version>${powerjob.version}</version>
        </dependency>

        <dependency>
            <groupId>de.codecentric</groupId>
            <artifactId>spring-boot-admin-starter-client</artifactId>
            <version>${spring-boot-admin.version}</version>
        </dependency>

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>

        <!-- servlet包:不能使用6高版本,否则无法启动 -->
        <dependency>
            <groupId>jakarta.servlet</groupId>
            <artifactId>jakarta.servlet-api</artifactId>
            <version>4.0.4</version>
        </dependency>

    </dependencies>

    <build>
        <finalName>${project.artifactId}</finalName>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <version>${spring-boot.version}</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

</project>
@@ -0,0 +1,25 @@ (new file)
package com.ruoyi.powerjob;

import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
import tech.powerjob.server.common.utils.PropertyUtils;

/**
 * powerjob 启动程序
 *
 * @author yhan219
 */
@Slf4j
@EnableScheduling
@SpringBootApplication(scanBasePackages = "tech.powerjob.server")
public class PowerJobServerApplication {

    public static void main(String[] args) {
        PropertyUtils.init();
        SpringApplication.run(tech.powerjob.server.PowerJobServerApplication.class, args);
        log.info("文档地址: https://www.yuque.com/powerjob/guidence/problem");
    }

}
@@ -0,0 +1,50 @@ (new file)
oms.env=dev

####### Database properties(Configure according to the the environment) #######
spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.core.jdbc-url=jdbc:mysql://localhost:3306/ry-vue?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
spring.datasource.core.username=root
spring.datasource.core.password=Root@369
spring.datasource.core.maximum-pool-size=20
spring.datasource.core.minimum-idle=5

# 监控配置
# 客户端开关
spring.boot.admin.client.enabled=true
# 设置 Spring Boot Admin Server 地址
spring.boot.admin.client.url: http://localhost:9090/admin
spring.boot.admin.client.instance.service-host-type=IP
spring.boot.admin.client.username=ruoyi
spring.boot.admin.client.password=123456

####### MongoDB properties(Non-core configuration properties) #######
####### delete mongodb config to disable mongodb #######
oms.mongodb.enable=false
#spring.data.mongodb.uri=mongodb+srv://zqq:No1Bug2Please3!@cluster0.wie54.gcp.mongodb.net/powerjob_daily?retryWrites=true&w=majority

####### Email properties(Non-core configuration properties) #######
####### Delete the following code to disable the mail #######
#spring.mail.host=smtp.163.com
#spring.mail.username=zqq@163.com
#spring.mail.password=GOFZPNARMVKCGONV
#spring.mail.properties.mail.smtp.auth=true
#spring.mail.properties.mail.smtp.starttls.enable=true
#spring.mail.properties.mail.smtp.starttls.required=true

####### DingTalk properties(Non-core configuration properties) #######
####### Delete the following code to disable the DingTalk #######
#oms.alarm.ding.app-key=dingauqwkvxxnqskknfv
#oms.alarm.ding.app-secret=XWrEPdAZMPgJeFtHuL0LH73LRj-74umF2_0BFcoXMfvnX0pCQvt0rpb1JOJU_HLl
#oms.alarm.ding.agent-id=847044348

####### Resource cleaning properties #######
oms.instanceinfo.retention=1
oms.container.retention.local=1
oms.container.retention.remote=-1

####### Cache properties #######
oms.instance.metadata.cache.size=1024

####### Threshold in precise fetching server(0~100). 100 means full detection of server, in which #######
####### split-brain could be avoided while performance overhead would increase. #######
oms.accurate.select.server.percentage = 50
@@ -0,0 +1,50 @@ (new file)
oms.env=prod

####### Database properties(Configure according to the the environment) #######
spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.core.jdbc-url=jdbc:mysql://localhost:3306/ry-vue?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
spring.datasource.core.username=root
spring.datasource.core.password=Root@369
spring.datasource.core.maximum-pool-size=20
spring.datasource.core.minimum-idle=5

# 监控配置
# 客户端开关
spring.boot.admin.client.enabled=true
# 设置 Spring Boot Admin Server 地址
spring.boot.admin.client.url: http://localhost:9090/admin
spring.boot.admin.client.instance.service-host-type=IP
spring.boot.admin.client.username=ruoyi
spring.boot.admin.client.password=123456

####### MongoDB properties(Non-core configuration properties) #######
####### delete mongodb config to disable mongodb #######
oms.mongodb.enable=false
#spring.data.mongodb.uri=mongodb+srv://zqq:No1Bug2Please3!@cluster0.wie54.gcp.mongodb.net/powerjob_daily?retryWrites=true&w=majority

####### Email properties(Non-core configuration properties) #######
####### Delete the following code to disable the mail #######
#spring.mail.host=smtp.163.com
#spring.mail.username=zqq@163.com
#spring.mail.password=GOFZPNARMVKCGONV
#spring.mail.properties.mail.smtp.auth=true
#spring.mail.properties.mail.smtp.starttls.enable=true
#spring.mail.properties.mail.smtp.starttls.required=true

####### DingTalk properties(Non-core configuration properties) #######
####### Delete the following code to disable the DingTalk #######
#oms.alarm.ding.app-key=dingauqwkvxxnqskknfv
#oms.alarm.ding.app-secret=XWrEPdAZMPgJeFtHuL0LH73LRj-74umF2_0BFcoXMfvnX0pCQvt0rpb1JOJU_HLl
#oms.alarm.ding.agent-id=847044348

####### Resource cleaning properties #######
oms.instanceinfo.retention=7
oms.container.retention.local=7
oms.container.retention.remote=-1

####### Cache properties #######
oms.instance.metadata.cache.size=2048

####### Threshold in precise fetching server(0~100). 100 means full detection of server, in which #######
####### split-brain could be avoided while performance overhead would increase. #######
oms.accurate.select.server.percentage = 50
@@ -0,0 +1,33 @@ (new file)
# Http server port
server.port=7700

spring.profiles.active=@profiles.active@
spring.main.banner-mode=log
spring.jpa.open-in-view=false
spring.data.mongodb.repositories.type=none
logging.level.org.mongodb=warn
logging.level.tech.powerjob.server=warn
logging.level.MONITOR_LOGGER_DB_OPERATION=warn
logging.level.MONITOR_LOGGER_WORKER_HEART_BEAT=warn
logging.config: classpath:logback.xml

# Configuration for uploading files.
spring.servlet.multipart.enabled=true
spring.servlet.multipart.file-size-threshold=0
spring.servlet.multipart.max-file-size=209715200
spring.servlet.multipart.max-request-size=209715200

###### PowerJob transporter configuration ######
oms.transporter.active.protocols=AKKA,HTTP
oms.transporter.main.protocol=HTTP
oms.akka.port=10086
oms.http.port=10010
# Prefix for all tables. Default empty string. Config if you have needs, i.e. pj_
oms.table-prefix=pj_

# Actuator 监控端点的配置项
spring.application.name: ruoyi-powerjob-server
management.endpoints.web.exposure.include=*
management.endpoint.health.show-details=ALWAYS
management.endpoint.logfile.external-file=./logs/ruoyi-powerjob-server.log
management.health.mongo.enabled=${oms.mongodb.enable}
@@ -0,0 +1,34 @@ (new file)
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false" scan="true" scanPeriod="1 seconds">

    <contextName>logback</contextName>
    <property name="log.path" value="./logs/ruoyi-powerjob-server"/>
    <property name="console.log.pattern"
              value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
    <property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n"/>

    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${console.log.pattern}</pattern>
            <charset>utf-8</charset>
        </encoder>
    </appender>

    <appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.path}.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log.path}.%d{yyyy-MM-dd}.log</fileNamePattern>
            <!-- 日志最大的历史 60天 -->
            <maxHistory>60</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>${log.pattern}</pattern>
        </encoder>
    </appender>

    <root level="info">
        <appender-ref ref="console"/>
        <appender-ref ref="file"/>
    </root>

</configuration>
@@ -13,7 +13,8 @@
 <modules>
 <module>ruoyi-demo</module>
 <module>ruoyi-generator</module>
-<module>ruoyi-quartz</module>
+<!-- <module>ruoyi-quartz</module>-->
+<module>ruoyi-job</module>
 <module>ruoyi-system</module>
 </modules>

ruoyi-modules/ruoyi-job/pom.xml (new file, 35 lines)
@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>com.ruoyi</groupId>
        <artifactId>ruoyi-modules</artifactId>
        <version>${revision}</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <packaging>jar</packaging>
    <artifactId>ruoyi-job</artifactId>

    <description>
        任务调度
    </description>

    <dependencies>

        <!-- 通用json工具-->
        <dependency>
            <groupId>com.ruoyi</groupId>
            <artifactId>ruoyi-common-json</artifactId>
        </dependency>

        <!-- 定时任务-->
        <dependency>
            <groupId>com.ruoyi</groupId>
            <artifactId>ruoyi-common-job</artifactId>
        </dependency>

    </dependencies>

</project>
@@ -0,0 +1,6 @@ (new file)
/**
 * 演示用例
 * copy from https://github.com/PowerJob/PowerJob/tree/master/powerjob-worker-samples/src/main/java/tech/powerjob/samples
 */

package com.ruoyi.job;
@@ -0,0 +1,56 @@ (new file)
package com.ruoyi.job.processors;

import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import tech.powerjob.common.utils.NetUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
import tech.powerjob.worker.core.processor.sdk.BroadcastProcessor;
import tech.powerjob.worker.log.OmsLogger;

import java.util.List;

/**
 * 广播处理器 示例
 *
 * @author tjq
 * @since 2020/4/17
 */
@Slf4j
@Component
public class BroadcastProcessorDemo implements BroadcastProcessor {

    @Override
    public ProcessResult preProcess(TaskContext context) {
        System.out.println("===== BroadcastProcessorDemo#preProcess ======");
        context.getOmsLogger().info("BroadcastProcessorDemo#preProcess, current host: {}", NetUtils.getLocalHost());
        if ("rootFailed".equals(context.getJobParams())) {
            return new ProcessResult(false, "console need failed");
        } else {
            return new ProcessResult(true);
        }
    }

    @Override
    public ProcessResult process(TaskContext taskContext) throws Exception {
        OmsLogger logger = taskContext.getOmsLogger();
        System.out.println("===== BroadcastProcessorDemo#process ======");
        logger.info("BroadcastProcessorDemo#process, current host: {}", NetUtils.getLocalHost());
        long sleepTime = 1000;
        try {
            sleepTime = Long.parseLong(taskContext.getJobParams());
        } catch (Exception e) {
            logger.warn("[BroadcastProcessor] parse sleep time failed!", e);
        }
        Thread.sleep(Math.max(sleepTime, 1000));
        return new ProcessResult(true);
    }

    @Override
    public ProcessResult postProcess(TaskContext context, List<TaskResult> taskResults) {
        System.out.println("===== BroadcastProcessorDemo#postProcess ======");
        context.getOmsLogger().info("BroadcastProcessorDemo#postProcess, current host: {}, taskResult: {}", NetUtils.getLocalHost(), taskResults);
        return new ProcessResult(true, "success");
    }
}
@@ -0,0 +1,41 @@ (new file)
package com.ruoyi.job.processors;

import com.alibaba.fastjson.JSONObject;
import org.springframework.stereotype.Component;
import tech.powerjob.official.processors.util.CommonUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;

import java.util.Date;
import java.util.Optional;

/**
 * LogTestProcessor
 *
 * @author tjq
 * @since 2022/9/18
 */
@Component
public class LogTestProcessor implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {

        final OmsLogger omsLogger = context.getOmsLogger();
        final String parseParams = CommonUtils.parseParams(context);
        final JSONObject config = Optional.ofNullable(JSONObject.parseObject(parseParams)).orElse(new JSONObject());

        final long loopTimes = Optional.ofNullable(config.getLong("loopTimes")).orElse(1000L);

        for (int i = 0; i < loopTimes; i++) {
            omsLogger.debug("[DEBUG] one DEBUG log in {}", new Date());
            omsLogger.info("[INFO] one INFO log in {}", new Date());
            omsLogger.warn("[WARN] one WARN log in {}", new Date());
            omsLogger.error("[ERROR] one ERROR log in {}", new Date());
        }

        return new ProcessResult(true);
    }
}
@@ -0,0 +1,93 @@ (new file)
package com.ruoyi.job.processors;

import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import com.ruoyi.common.json.utils.JsonUtils;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.MapProcessor;

import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Map处理器 示例
 *
 * @author tjq
 * @since 2020/4/18
 */
@Slf4j
@Component
public class MapProcessorDemo implements MapProcessor {

    /**
     * 每一批发送任务大小
     */
    private static final int BATCH_SIZE = 100;
    /**
     * 发送的批次
     */
    private static final int BATCH_NUM = 5;

    @Override
    public ProcessResult process(TaskContext context) throws Exception {

        log.info("============== MapProcessorDemo#process ==============");
        log.info("isRootTask:{}", isRootTask());
        log.info("taskContext:{}", JsonUtils.toJsonString(context));

        if (isRootTask()) {
            log.info("==== MAP ====");
            List<SubTask> subTasks = Lists.newLinkedList();
            for (int j = 0; j < BATCH_NUM; j++) {
                SubTask subTask = new SubTask();
                subTask.siteId = j;
                subTask.itemIds = Lists.newLinkedList();
                subTasks.add(subTask);
                for (int i = 0; i < BATCH_SIZE; i++) {
                    subTask.itemIds.add(i + j * 100);
                }
            }
            map(subTasks, "MAP_TEST_TASK");
            return new ProcessResult(true, "map successfully");
        } else {

            log.info("==== PROCESS ====");
            SubTask subTask = (SubTask) context.getSubTask();
            for (Integer itemId : subTask.getItemIds()) {
                if (Thread.interrupted()) {
                    // 任务被中断
                    log.info("job has been stop! so stop to process subTask: {} => {}", subTask.getSiteId(), itemId);
                    break;
                }
                log.info("processing subTask: {} => {}", subTask.getSiteId(), itemId);
                int max = Integer.MAX_VALUE >> 7;
                for (int i = 0; ; i++) {
                    // 模拟耗时操作
                    if (i > max) {
                        break;
                    }
                }
            }
            // 测试在 Map 任务中追加上下文
            context.getWorkflowContext().appendData2WfContext("Yasuo", "A sword's poor company for a long road.");
            boolean b = ThreadLocalRandom.current().nextBoolean();
            if (context.getCurrentRetryTimes() >= 1) {
                // 重试的话一定会成功
                b = true;
            }
            return new ProcessResult(b, "RESULT:" + b);
        }
    }

    @Getter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class SubTask {
        private Integer siteId;
        private List<Integer> itemIds;
    }
}
@@ -0,0 +1,93 @@ (new file)
package com.ruoyi.job.processors;

import cn.hutool.core.lang.Dict;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import com.ruoyi.common.json.utils.JsonUtils;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
import tech.powerjob.worker.core.processor.sdk.MapReduceProcessor;
import tech.powerjob.worker.log.OmsLogger;

import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

/**
 * MapReduce 处理器示例
 * 控制台参数:{"batchSize": 100, "batchNum": 2}
 *
 * @author tjq
 * @since 2020/4/17
 */
@Slf4j
@Component
public class MapReduceProcessorDemo implements MapReduceProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {

        OmsLogger omsLogger = context.getOmsLogger();

        log.info("============== TestMapReduceProcessor#process ==============");
        log.info("isRootTask:{}", isRootTask());
        log.info("taskContext:{}", JsonUtils.toJsonString(context));

        // 根据控制台参数获取MR批次及子任务大小
        final Dict jobParams = JsonUtils.parseMap(context.getJobParams());

        Integer batchSize = (Integer) jobParams.getOrDefault("batchSize", 100);
        Integer batchNum = (Integer) jobParams.getOrDefault("batchNum", 10);

        if (isRootTask()) {
            log.info("==== MAP ====");
            omsLogger.info("[DemoMRProcessor] start root task~");
            List<TestSubTask> subTasks = Lists.newLinkedList();
            for (int j = 0; j < batchNum; j++) {
                for (int i = 0; i < batchSize; i++) {
                    int x = j * batchSize + i;
                    subTasks.add(new TestSubTask("name" + x, x));
                }
                map(subTasks, "MAP_TEST_TASK");
                subTasks.clear();
            }
            omsLogger.info("[DemoMRProcessor] map success~");
            return new ProcessResult(true, "MAP_SUCCESS");
        } else {
            log.info("==== NORMAL_PROCESS ====");
            omsLogger.info("[DemoMRProcessor] process subTask: {}.", JsonUtils.toJsonString(context.getSubTask()));
            log.info("subTask: {}", JsonUtils.toJsonString(context.getSubTask()));
            Thread.sleep(1000);
            if (context.getCurrentRetryTimes() == 0) {
                return new ProcessResult(false, "FIRST_FAILED");
            } else {
                return new ProcessResult(true, "PROCESS_SUCCESS");
            }
        }
    }

    @Override
    public ProcessResult reduce(TaskContext context, List<TaskResult> taskResults) {
        log.info("================ MapReduceProcessorDemo#reduce ================");
        log.info("TaskContext: {}", JsonUtils.toJsonString(context));
        log.info("List<TaskResult>: {}", JsonUtils.toJsonString(taskResults));
        context.getOmsLogger().info("MapReduce job finished, result is {}.", taskResults);

        boolean success = ThreadLocalRandom.current().nextBoolean();
        return new ProcessResult(success, context + ": " + success);
    }

    @Getter
    @ToString
    @NoArgsConstructor
    @AllArgsConstructor
    public static class TestSubTask {
        private String name;
        private int age;
    }
}
@@ -0,0 +1,35 @@ (new file)
package com.ruoyi.job.processors;

import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;

import java.util.Optional;

/**
 * @author Echo009
 * @since 2022/4/27
 */
@Component
public class SimpleProcessor implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {

        OmsLogger logger = context.getOmsLogger();

        String jobParams = Optional.ofNullable(context.getJobParams()).orElse("S");
        logger.info("Current context:{}", context.getWorkflowContext());
        logger.info("Current job params:{}", jobParams);

        // 测试中文问题 #581
        if (jobParams.contains("CN")) {
            return new ProcessResult(true, "任务成功啦!!!");
        }

        return jobParams.contains("F") ? new ProcessResult(false) : new ProcessResult(true, "yeah!");

    }
}
@@ -0,0 +1,51 @@ (new file)
package com.ruoyi.job.processors;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;

import java.util.Collections;

/**
 * 单机处理器 示例
 *
 * @author tjq
 * @since 2020/4/17
 */
@Slf4j
@Component
public class StandaloneProcessorDemo implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        OmsLogger omsLogger = context.getOmsLogger();
        omsLogger.info("StandaloneProcessorDemo start process,context is {}.", context);
        omsLogger.info("Notice! If you want this job process failed, your jobParams need to be 'failed'");
        omsLogger.info("Let's test the exception~");
        // 测试异常日志
        try {
            Collections.emptyList().add("277");
        } catch (Exception e) {
            omsLogger.error("oh~it seems that we have an exception~", e);
        }
        log.info("================ StandaloneProcessorDemo#process ================");
        log.info("jobParam:{}", context.getJobParams());
        log.info("instanceParams:{}", context.getInstanceParams());
        String param;
        // 解析参数,非处于工作流中时,优先取实例参数(允许动态[instanceParams]覆盖静态参数[jobParams])
        if (context.getWorkflowContext() == null) {
            param = StringUtils.isBlank(context.getInstanceParams()) ? context.getJobParams() : context.getInstanceParams();
        } else {
            param = context.getJobParams();
        }
        // 根据参数判断是否成功
        boolean success = !"failed".equals(param);
        omsLogger.info("StandaloneProcessorDemo finished process,success: {}", success);
        omsLogger.info("anyway, we finished the job successfully~Congratulations!");
        return new ProcessResult(success, context + ": " + success);
    }
}
@@ -0,0 +1,25 @@ (new file)
package com.ruoyi.job.processors;

import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;

/**
 * 测试超时任务(可中断)
 *
 * @author tjq
 * @since 2020/4/20
 */
@Component
@Slf4j
public class TimeoutProcessor implements BasicProcessor {
    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        long sleepTime = Long.parseLong(context.getJobParams());
        log.info("TaskInstance({}) will sleep {} ms", context.getInstanceId(), sleepTime);
        Thread.sleep(Long.parseLong(context.getJobParams()));
        return new ProcessResult(true, "impossible~~~~QAQ~");
    }
}
@@ -0,0 +1,36 @@ (new file)
package com.ruoyi.job.workflow;

import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
import tech.powerjob.worker.log.OmsLogger;

import java.util.Map;

/**
 * 工作流测试
 *
 * @author tjq
 * @since 2020/6/2
 */
@Component
@Slf4j
public class WorkflowStandaloneProcessor implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        OmsLogger logger = context.getOmsLogger();
        logger.info("current jobParams: {}", context.getJobParams());
        logger.info("current context: {}", context.getWorkflowContext());
        log.info("jobParams:{}", context.getJobParams());
        log.info("currentContext:{}", JSON.toJSONString(context));

        // 尝试获取上游任务
        Map<String, String> workflowContext = context.getWorkflowContext().fetchWorkflowContext();
        log.info("工作流上下文数据:{}", workflowContext);
        return new ProcessResult(true, context.getJobId() + " process successfully.");
    }
}
@@ -10,5 +10,8 @@ VUE_APP_BASE_API = '/dev-api'
 # 监控地址
 VUE_APP_MONITRO_ADMIN = 'http://localhost:9090/admin/applications'

+# powerjob任务调度控制台地址
+VUE_APP_POWERJOB_ADMIN = 'http://localhost:7700/'

 # 路由懒加载
 VUE_CLI_BABEL_TRANSPILE_MODULES = true
@@ -9,3 +9,6 @@ VUE_APP_BASE_API = '/prod-api'

 # 监控地址
 VUE_APP_MONITRO_ADMIN = 'http://localhost:9090/admin/applications'

+# powerjob任务调度控制台地址
+VUE_APP_POWERJOB_ADMIN = 'http://localhost:7700/'
ruoyi-ui/src/views/monitor/powerjob/index.vue (new file, 22 lines)
@@ -0,0 +1,22 @@
<template>
  <div>
    <i-frame :src="url"></i-frame>
  </div>
</template>

<script>
import IFrame from "@/components/iFrame/index.vue";

export default {
  name: "PowerJob",
  components: {IFrame},
  data() {
    return {
      url: String
    };
  },
  created() {
    this.url = process.env.VUE_APP_POWERJOB_ADMIN;
  }
};
</script>
174
sql/quartz.sql
@ -1,174 +0,0 @@
DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS;
DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE;
DROP TABLE IF EXISTS QRTZ_LOCKS;
DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_JOB_DETAILS;
DROP TABLE IF EXISTS QRTZ_CALENDARS;

-- ----------------------------
-- 1. Detailed information for every configured jobDetail
-- ----------------------------
create table QRTZ_JOB_DETAILS (
  sched_name varchar(120) not null comment '调度名称',
  job_name varchar(200) not null comment '任务名称',
  job_group varchar(200) not null comment '任务组名',
  description varchar(250) null comment '相关介绍',
  job_class_name varchar(250) not null comment '执行任务类名称',
  is_durable varchar(1) not null comment '是否持久化',
  is_nonconcurrent varchar(1) not null comment '是否并发',
  is_update_data varchar(1) not null comment '是否更新数据',
  requests_recovery varchar(1) not null comment '是否接受恢复执行',
  job_data blob null comment '存放持久化job对象',
  primary key (sched_name, job_name, job_group)
) engine=innodb comment = '任务详细信息表';

-- ----------------------------
-- 2. Information for every configured Trigger
-- ----------------------------
create table QRTZ_TRIGGERS (
  sched_name varchar(120) not null comment '调度名称',
  trigger_name varchar(200) not null comment '触发器的名字',
  trigger_group varchar(200) not null comment '触发器所属组的名字',
  job_name varchar(200) not null comment 'qrtz_job_details表job_name的外键',
  job_group varchar(200) not null comment 'qrtz_job_details表job_group的外键',
  description varchar(250) null comment '相关介绍',
  next_fire_time bigint(13) null comment '下一次触发时间(毫秒)',
  prev_fire_time bigint(13) null comment '上一次触发时间(默认为-1表示不触发)',
  priority integer null comment '优先级',
  trigger_state varchar(16) not null comment '触发器状态',
  trigger_type varchar(8) not null comment '触发器的类型',
  start_time bigint(13) not null comment '开始时间',
  end_time bigint(13) null comment '结束时间',
  calendar_name varchar(200) null comment '日程表名称',
  misfire_instr smallint(2) null comment '补偿执行的策略',
  job_data blob null comment '存放持久化job对象',
  primary key (sched_name, trigger_name, trigger_group),
  foreign key (sched_name, job_name, job_group) references QRTZ_JOB_DETAILS(sched_name, job_name, job_group)
) engine=innodb comment = '触发器详细信息表';

-- ----------------------------
-- 3. Simple triggers, including the repeat count, interval and number of times already fired
-- ----------------------------
create table QRTZ_SIMPLE_TRIGGERS (
  sched_name varchar(120) not null comment '调度名称',
  trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
  trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
  repeat_count bigint(7) not null comment '重复的次数统计',
  repeat_interval bigint(12) not null comment '重复的间隔时间',
  times_triggered bigint(10) not null comment '已经触发的次数',
  primary key (sched_name, trigger_name, trigger_group),
  foreign key (sched_name, trigger_name, trigger_group) references QRTZ_TRIGGERS(sched_name, trigger_name, trigger_group)
) engine=innodb comment = '简单触发器的信息表';

-- ----------------------------
-- 4. Cron triggers, including the cron expression and time-zone information
-- ----------------------------
create table QRTZ_CRON_TRIGGERS (
  sched_name varchar(120) not null comment '调度名称',
  trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
  trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
  cron_expression varchar(200) not null comment 'cron表达式',
  time_zone_id varchar(80) comment '时区',
  primary key (sched_name, trigger_name, trigger_group),
  foreign key (sched_name, trigger_name, trigger_group) references QRTZ_TRIGGERS(sched_name, trigger_name, trigger_group)
) engine=innodb comment = 'Cron类型的触发器表';

-- ----------------------------
-- 5. Triggers stored as Blobs (used when Quartz users create their own custom trigger types via JDBC and the JobStore does not know how to persist the instances)
-- ----------------------------
create table QRTZ_BLOB_TRIGGERS (
  sched_name varchar(120) not null comment '调度名称',
  trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
  trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
  blob_data blob null comment '存放持久化Trigger对象',
  primary key (sched_name, trigger_name, trigger_group),
  foreign key (sched_name, trigger_name, trigger_group) references QRTZ_TRIGGERS(sched_name, trigger_name, trigger_group)
) engine=innodb comment = 'Blob类型的触发器表';

-- ----------------------------
-- 6. Calendar information stored as Blobs; Quartz can use a configured calendar to specify a time range
-- ----------------------------
create table QRTZ_CALENDARS (
  sched_name varchar(120) not null comment '调度名称',
  calendar_name varchar(200) not null comment '日历名称',
  calendar blob not null comment '存放持久化calendar对象',
  primary key (sched_name, calendar_name)
) engine=innodb comment = '日历信息表';

-- ----------------------------
-- 7. Information about paused trigger groups
-- ----------------------------
create table QRTZ_PAUSED_TRIGGER_GRPS (
  sched_name varchar(120) not null comment '调度名称',
  trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
  primary key (sched_name, trigger_group)
) engine=innodb comment = '暂停的触发器表';

-- ----------------------------
-- 8. State information for fired triggers and execution information of the associated jobs
-- ----------------------------
create table QRTZ_FIRED_TRIGGERS (
  sched_name varchar(120) not null comment '调度名称',
  entry_id varchar(95) not null comment '调度器实例id',
  trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
  trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
  instance_name varchar(200) not null comment '调度器实例名',
  fired_time bigint(13) not null comment '触发的时间',
  sched_time bigint(13) not null comment '定时器制定的时间',
  priority integer not null comment '优先级',
  state varchar(16) not null comment '状态',
  job_name varchar(200) null comment '任务名称',
  job_group varchar(200) null comment '任务组名',
  is_nonconcurrent varchar(1) null comment '是否并发',
  requests_recovery varchar(1) null comment '是否接受恢复执行',
  primary key (sched_name, entry_id)
) engine=innodb comment = '已触发的触发器表';

-- ----------------------------
-- 9. A small amount of scheduler state; when clustered, the other scheduler instances can be seen here
-- ----------------------------
create table QRTZ_SCHEDULER_STATE (
  sched_name varchar(120) not null comment '调度名称',
  instance_name varchar(200) not null comment '实例名称',
  last_checkin_time bigint(13) not null comment '上次检查时间',
  checkin_interval bigint(13) not null comment '检查间隔时间',
  primary key (sched_name, instance_name)
) engine=innodb comment = '调度器状态表';

-- ----------------------------
-- 10. Pessimistic lock information (if pessimistic locking is used)
-- ----------------------------
create table QRTZ_LOCKS (
  sched_name varchar(120) not null comment '调度名称',
  lock_name varchar(40) not null comment '悲观锁名称',
  primary key (sched_name, lock_name)
) engine=innodb comment = '存储的悲观锁信息表';

-- ----------------------------
-- 11. Extra properties for triggers persisted as simple properties (e.g. CalendarIntervalTrigger and DailyTimeIntervalTrigger)
-- ----------------------------
create table QRTZ_SIMPROP_TRIGGERS (
  sched_name varchar(120) not null comment '调度名称',
  trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
  trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
  str_prop_1 varchar(512) null comment 'String类型的trigger的第一个参数',
  str_prop_2 varchar(512) null comment 'String类型的trigger的第二个参数',
  str_prop_3 varchar(512) null comment 'String类型的trigger的第三个参数',
  int_prop_1 int null comment 'int类型的trigger的第一个参数',
  int_prop_2 int null comment 'int类型的trigger的第二个参数',
  long_prop_1 bigint null comment 'long类型的trigger的第一个参数',
  long_prop_2 bigint null comment 'long类型的trigger的第二个参数',
  dec_prop_1 numeric(13,4) null comment 'decimal类型的trigger的第一个参数',
  dec_prop_2 numeric(13,4) null comment 'decimal类型的trigger的第二个参数',
  bool_prop_1 varchar(1) null comment 'Boolean类型的trigger的第一个参数',
  bool_prop_2 varchar(1) null comment 'Boolean类型的trigger的第二个参数',
  primary key (sched_name, trigger_name, trigger_group),
  foreign key (sched_name, trigger_name, trigger_group) references QRTZ_TRIGGERS(sched_name, trigger_name, trigger_group)
) engine=innodb comment = '简单属性触发器信息表';

commit;
236
sql/update.sql
@ -275,5 +275,241 @@ delete FROM sys_menu WHERE menu_name = "缓存列表"
-- The "服务监控" (service monitoring) menu uses the SpringBoot-Admin monitoring framework:
UPDATE `sys_menu` SET `path`='admin', `component`='monitor/admin/index', `perms`='monitor:admin:list' WHERE `menu_id`=112;

-- update to V4.1.6:
-- ----------------------------
-- Table structure for pj_app_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_app_info`;
CREATE TABLE `pj_app_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `app_name` varchar(255) NULL DEFAULT NULL,
  `current_server` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `password` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_app_info`(`app_name`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Records of pj_app_info
-- ----------------------------
INSERT INTO `pj_app_info` VALUES (1, 'ruoyi-worker', '127.0.0.1:10010', '2023-06-13 16:32:59.263000', '2023-07-04 17:25:49.798000', '123456');

-- ----------------------------
-- Table structure for pj_container_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_container_info`;
CREATE TABLE `pj_container_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `app_id` bigint(0) NULL DEFAULT NULL,
  `container_name` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `last_deploy_time` datetime(6) NULL DEFAULT NULL,
  `source_info` varchar(255) NULL DEFAULT NULL,
  `source_type` int(0) NULL DEFAULT NULL,
  `status` int(0) NULL DEFAULT NULL,
  `version` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  INDEX `idx01_container_info`(`app_id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Table structure for pj_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_instance_info`;
CREATE TABLE `pj_instance_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint(0) NULL DEFAULT NULL,
  `app_id` bigint(0) NULL DEFAULT NULL,
  `expected_trigger_time` bigint(0) NULL DEFAULT NULL,
  `finished_time` bigint(0) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `instance_id` bigint(0) NULL DEFAULT NULL,
  `instance_params` longtext NULL,
  `job_id` bigint(0) NULL DEFAULT NULL,
  `job_params` longtext NULL,
  `last_report_time` bigint(0) NULL DEFAULT NULL,
  `result` longtext NULL,
  `running_times` bigint(0) NULL DEFAULT NULL,
  `status` int(0) NULL DEFAULT NULL,
  `task_tracker_address` varchar(255) NULL DEFAULT NULL,
  `type` int(0) NULL DEFAULT NULL,
  `wf_instance_id` bigint(0) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  INDEX `idx01_instance_info`(`job_id`, `status`) USING BTREE,
  INDEX `idx02_instance_info`(`app_id`, `status`) USING BTREE,
  INDEX `idx03_instance_info`(`instance_id`, `status`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 4 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Table structure for pj_job_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_job_info`;
CREATE TABLE `pj_job_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `alarm_config` varchar(255) NULL DEFAULT NULL,
  `app_id` bigint(0) NULL DEFAULT NULL,
  `concurrency` int(0) NULL DEFAULT NULL,
  `designated_workers` varchar(255) NULL DEFAULT NULL,
  `dispatch_strategy` int(0) NULL DEFAULT NULL,
  `execute_type` int(0) NULL DEFAULT NULL,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `instance_retry_num` int(0) NULL DEFAULT NULL,
  `instance_time_limit` bigint(0) NULL DEFAULT NULL,
  `job_description` varchar(255) NULL DEFAULT NULL,
  `job_name` varchar(255) NULL DEFAULT NULL,
  `job_params` longtext NULL,
  `lifecycle` varchar(255) NULL DEFAULT NULL,
  `log_config` varchar(255) NULL DEFAULT NULL,
  `max_instance_num` int(0) NULL DEFAULT NULL,
  `max_worker_count` int(0) NULL DEFAULT NULL,
  `min_cpu_cores` double NOT NULL,
  `min_disk_space` double NOT NULL,
  `min_memory_space` double NOT NULL,
  `next_trigger_time` bigint(0) NULL DEFAULT NULL,
  `notify_user_ids` varchar(255) NULL DEFAULT NULL,
  `processor_info` varchar(255) NULL DEFAULT NULL,
  `processor_type` int(0) NULL DEFAULT NULL,
  `status` int(0) NULL DEFAULT NULL,
  `tag` varchar(255) NULL DEFAULT NULL,
  `task_retry_num` int(0) NULL DEFAULT NULL,
  `time_expression` varchar(255) NULL DEFAULT NULL,
  `time_expression_type` int(0) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  INDEX `idx01_job_info`(`app_id`, `status`, `time_expression_type`, `next_trigger_time`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 5 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Records of pj_job_info
-- ----------------------------
INSERT INTO `pj_job_info` VALUES (1, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 2, 1, NULL, '2023-06-02 15:01:27.717000', '2023-07-04 17:22:12.374000', 1, 0, '', '单机处理器执行测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'org.dromara.job.processors.StandaloneProcessorDemo', 1, 2, NULL, 1, '30000', 3);
INSERT INTO `pj_job_info` VALUES (2, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 1, 2, NULL, '2023-06-02 15:04:45.342000', '2023-07-04 17:22:12.816000', 0, 0, NULL, '广播处理器测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'org.dromara.job.processors.BroadcastProcessorDemo', 1, 2, NULL, 1, '30000', 3);
INSERT INTO `pj_job_info` VALUES (3, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 1, 4, NULL, '2023-06-02 15:13:23.519000', '2023-06-02 16:03:22.421000', 0, 0, NULL, 'Map处理器测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'org.dromara.job.processors.MapProcessorDemo', 1, 2, NULL, 1, '1000', 3);
INSERT INTO `pj_job_info` VALUES (4, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 1, 3, NULL, '2023-06-02 15:45:25.896000', '2023-06-02 16:03:23.125000', 0, 0, NULL, 'MapReduce处理器测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'org.dromara.job.processors.MapReduceProcessorDemo', 1, 2, NULL, 1, '1000', 3);
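The processor_info values above point at fully qualified class names on the worker side (org.dromara.job.processors.*), which are not shown in this excerpt. As a rough, hypothetical sketch of what such a standalone processor might look like against the PowerJob worker SDK (the class name and result message are illustrative only, not the project's actual demo):

import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;

// Hypothetical stand-in for a standalone (single-machine) processor:
// echoes its parameters back as the instance result.
public class StandaloneProcessorSketch implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // jobParams corresponds to pj_job_info.job_params; instanceParams comes from the caller that triggered this run
        String msg = "jobParams=" + context.getJobParams()
                + ", instanceParams=" + context.getInstanceParams();
        // The returned message is reported back to the server (cf. pj_instance_info.result)
        return new ProcessResult(true, msg);
    }
}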

-- ----------------------------
-- Table structure for pj_oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `pj_oms_lock`;
CREATE TABLE `pj_oms_lock` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `lock_name` varchar(255) NULL DEFAULT NULL,
  `max_lock_time` bigint(0) NULL DEFAULT NULL,
  `ownerip` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_oms_lock`(`lock_name`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 6 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Table structure for pj_server_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_server_info`;
CREATE TABLE `pj_server_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `ip` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_server_info`(`ip`) USING BTREE,
  INDEX `idx01_server_info`(`gmt_modified`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Table structure for pj_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_user_info`;
CREATE TABLE `pj_user_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `email` varchar(255) NULL DEFAULT NULL,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `password` varchar(255) NULL DEFAULT NULL,
  `phone` varchar(255) NULL DEFAULT NULL,
  `username` varchar(255) NULL DEFAULT NULL,
  `web_hook` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  INDEX `uidx01_user_info`(`username`) USING BTREE,
  INDEX `uidx02_user_info`(`email`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Table structure for pj_workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_workflow_info`;
CREATE TABLE `pj_workflow_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `app_id` bigint(0) NULL DEFAULT NULL,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `lifecycle` varchar(255) NULL DEFAULT NULL,
  `max_wf_instance_num` int(0) NULL DEFAULT NULL,
  `next_trigger_time` bigint(0) NULL DEFAULT NULL,
  `notify_user_ids` varchar(255) NULL DEFAULT NULL,
  `pedag` longtext NULL,
  `status` int(0) NULL DEFAULT NULL,
  `time_expression` varchar(255) NULL DEFAULT NULL,
  `time_expression_type` int(0) NULL DEFAULT NULL,
  `wf_description` varchar(255) NULL DEFAULT NULL,
  `wf_name` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  INDEX `idx01_workflow_info`(`app_id`, `status`, `time_expression_type`, `next_trigger_time`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Table structure for pj_workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_workflow_instance_info`;
CREATE TABLE `pj_workflow_instance_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint(0) NULL DEFAULT NULL,
  `app_id` bigint(0) NULL DEFAULT NULL,
  `dag` longtext NULL,
  `expected_trigger_time` bigint(0) NULL DEFAULT NULL,
  `finished_time` bigint(0) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `parent_wf_instance_id` bigint(0) NULL DEFAULT NULL,
  `result` longtext NULL,
  `status` int(0) NULL DEFAULT NULL,
  `wf_context` longtext NULL,
  `wf_init_params` longtext NULL,
  `wf_instance_id` bigint(0) NULL DEFAULT NULL,
  `workflow_id` bigint(0) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_wf_instance`(`wf_instance_id`) USING BTREE,
  INDEX `idx01_wf_instance`(`workflow_id`, `status`, `app_id`, `expected_trigger_time`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;

-- ----------------------------
-- Table structure for pj_workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `pj_workflow_node_info`;
CREATE TABLE `pj_workflow_node_info` (
  `id` bigint(0) NOT NULL AUTO_INCREMENT,
  `app_id` bigint(0) NOT NULL,
  `enable` bit(1) NOT NULL,
  `extra` longtext NULL,
  `gmt_create` datetime(6) NULL,
  `gmt_modified` datetime(6) NULL,
  `job_id` bigint(0) NULL DEFAULT NULL,
  `node_name` varchar(255) NULL DEFAULT NULL,
  `node_params` longtext NULL,
  `skip_when_failed` bit(1) NOT NULL,
  `type` int(0) NULL DEFAULT NULL,
  `workflow_id` bigint(0) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  INDEX `idx01_workflow_node_info`(`workflow_id`, `gmt_create`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;