diff --git a/pom.xml b/pom.xml
index 1dc0812..7327a3b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -13,19 +13,18 @@
Ruoyi-Flex管理系统
- 4.1.5
+ 4.1.6-SNAPSHOT
UTF-8
UTF-8
17
- 3.1.2
- 1.5.7
+ 3.1.3
+ 1.5.8
1.35.0.RC
8.0.33
3.1.1
5.0.1
1.21
2.3.3
- 1.4.6
5.3.3
2.0.34
6.4.4
@@ -38,17 +37,19 @@
6.0.0
32.1.1-jre
1.5.0
- 2.1.0
+ 2.2.0
2.1.0
0.15.0
+ 1.33
1.18.28
1.3.5
0.2.0
5.8.21
- 3.23.2
+ 3.23.3
2.2.4
2.14.3
- 3.1.4
+ 3.1.5
+ 4.3.5
3.2.2
@@ -175,11 +176,6 @@
-
- com.github.pagehelper
- pagehelper-spring-boot-starter
- ${pagehelper.boot.version}
-
com.github.pagehelper
pagehelper
@@ -305,6 +301,13 @@
${lombok.version}
+
+
+ org.yaml
+ snakeyaml
+ ${snakeyaml.version}
+
+
org.redisson
@@ -343,10 +346,29 @@
${spring-boot-admin.version}
+
+
+ tech.powerjob
+ powerjob-worker-spring-boot-starter
+ ${powerjob.version}
+
+
+ tech.powerjob
+ powerjob-official-processors
+ ${powerjob.version}
+
+
+
+
+
+
+
+
+
com.ruoyi
- ruoyi-quartz
+ ruoyi-job
${revision}
diff --git a/ruoyi-admin/pom.xml b/ruoyi-admin/pom.xml
index 0a46e5d..5ffaab1 100644
--- a/ruoyi-admin/pom.xml
+++ b/ruoyi-admin/pom.xml
@@ -37,12 +37,6 @@
postgresql
-
-
- com.mybatis-flex
- mybatis-flex-spring-boot-starter
-
-
com.ruoyi
@@ -50,9 +44,15 @@
+
+
+
+
+
+
com.ruoyi
- ruoyi-quartz
+ ruoyi-job
@@ -73,6 +73,12 @@
spring-boot-admin-starter-client
+
+
+
+
+
+
@@ -81,16 +87,13 @@
org.springframework.boot
spring-boot-maven-plugin
${spring-boot.version}
-
-
-
-
-
-
-
-
-
-
+
+
+
+ repackage
+
+
+
org.apache.maven.plugins
diff --git a/ruoyi-admin/src/main/resources/META-INF/spring-devtools.properties b/ruoyi-admin/src/main/resources/META-INF/spring-devtools.properties
index 7d35997..958416c 100644
--- a/ruoyi-admin/src/main/resources/META-INF/spring-devtools.properties
+++ b/ruoyi-admin/src/main/resources/META-INF/spring-devtools.properties
@@ -1,2 +1,4 @@
restart.include.json=/com.alibaba.fastjson.*.jar
-restart.include.mybatis-flex=/mybatis-flex.*.jar
\ No newline at end of file
+restart.include.mapper=/mapper-[\\w-\\.]+jar
+restart.include.pagehelper=/pagehelper-[\\w-\\.]+jar
+restart.include.mybatis-flex=/mybatis-flex-[\\w-\\.]+jar
diff --git a/ruoyi-admin/src/main/resources/application-dev.yml b/ruoyi-admin/src/main/resources/application-dev.yml
index 0f50a5b..faac8c1 100644
--- a/ruoyi-admin/src/main/resources/application-dev.yml
+++ b/ruoyi-admin/src/main/resources/application-dev.yml
@@ -5,10 +5,7 @@ mybatis-flex:
# 不支持多包, 如有需要可在注解配置 或 提升扫包等级:com.**.**.mapper
mapperPackage: com.ruoyi.**.mapper
# 配置mapper的扫描,找到所有的mapper.xml映射文件
- mapper-locations: classpath*:mapper/**/*Mapper.xml
- cacheEnabled: true
- useGeneratedKeys: true
- defaultExecutorType: SIMPLE
+ mapperLocations: classpath*:mapper/**/*Mapper.xml
#本部分(Configuration)的配置都为 MyBatis 原生支持的配置,有关配置请参考:https://mybatis.org/mybatis-3/zh/configuration.html#%E8%AE%BE%E7%BD%AE%EF%BC%88settings%EF%BC%89
configuration:
@@ -24,17 +21,19 @@ mybatis-flex:
# 关闭日志记录 (可单纯使用 p6spy 分析) org.apache.ibatis.logging.nologging.NoLoggingImpl
# 默认日志输出 org.apache.ibatis.logging.slf4j.Slf4jImpl
logImpl: org.apache.ibatis.logging.slf4j.Slf4jImpl
+ cacheEnabled: true
+ useGeneratedKeys: true
+ defaultExecutorType: SIMPLE
# MyBatis-Flex全局配置
global-config:
# 是否控制台打印 MyBatis-Flex 的 LOGO 及版本号
- print-banner: false
+ print-banner: true
# 逻辑删除数据存在标记值
normal-value-of-logic-delete: 0
# 逻辑删除数据删除标记值
deleted-value-of-logic-delete: 1
-
datasource:
# 数据源-1
PrimaryDS:
@@ -126,3 +125,21 @@ spring.boot.admin.client:
service-host-type: IP
username: ruoyi
password: 123456
+
+--- # powerjob 配置
+powerjob:
+ worker:
+ # 如何开启调度中心请查看文档教程
+ enabled: true
+ # 需要先在 powerjob 登录页执行应用注册后才能使用
+ app-name: ruoyi-worker
+ # 28080 端口 随着主应用端口漂移 避免集群冲突
+ port: 2${server.port}
+ protocol: http
+ server-address: 127.0.0.1:7700
+ store-strategy: disk
+ enable-test-mode: false
+ max-appended-wf-context-length: 4096
+ max-result-length: 4096
+
+
diff --git a/ruoyi-admin/src/main/resources/application-prod.yml b/ruoyi-admin/src/main/resources/application-prod.yml
index d6e1ef0..2e74cd2 100644
--- a/ruoyi-admin/src/main/resources/application-prod.yml
+++ b/ruoyi-admin/src/main/resources/application-prod.yml
@@ -129,3 +129,19 @@ spring.boot.admin.client:
username: ruoyi
password: 123456
+--- # powerjob 配置
+powerjob:
+ worker:
+ # 如何开启调度中心请查看文档教程
+ enabled: true
+ # 需要先在 powerjob 登录页执行应用注册后才能使用
+ app-name: ruoyi-worker
+ enable-test-mode: false
+ max-appended-wf-context-length: 4096
+ max-result-length: 4096
+ # 28080 端口 随着主应用端口漂移 避免集群冲突
+ port: 2${server.port}
+ protocol: http
+ server-address: 127.0.0.1:7700
+ store-strategy: disk
+
diff --git a/ruoyi-admin/src/main/resources/application.yml b/ruoyi-admin/src/main/resources/application.yml
index a604715..b72f805 100644
--- a/ruoyi-admin/src/main/resources/application.yml
+++ b/ruoyi-admin/src/main/resources/application.yml
@@ -53,6 +53,7 @@ logging:
level:
com.ruoyi: debug
org.springframework: warn
+ tech.powerjob.worker.background: warn
config: classpath:logback.xml
# 用户配置
diff --git a/ruoyi-common/pom.xml b/ruoyi-common/pom.xml
index 719a7e3..cd477d4 100644
--- a/ruoyi-common/pom.xml
+++ b/ruoyi-common/pom.xml
@@ -14,6 +14,7 @@
ruoyi-common-bom
ruoyi-common-core
ruoyi-common-excel
+ ruoyi-common-job
ruoyi-common-json
ruoyi-common-log
ruoyi-common-orm
diff --git a/ruoyi-common/ruoyi-common-bom/pom.xml b/ruoyi-common/ruoyi-common-bom/pom.xml
index 66f8e2a..62d24b3 100644
--- a/ruoyi-common/ruoyi-common-bom/pom.xml
+++ b/ruoyi-common/ruoyi-common-bom/pom.xml
@@ -14,7 +14,7 @@
- 4.1.5
+ 4.1.6-SNAPSHOT
@@ -33,6 +33,13 @@
${revision}
+
+
+ com.ruoyi
+ ruoyi-common-job
+ ${revision}
+
+
com.ruoyi
diff --git a/ruoyi-common/ruoyi-common-core/pom.xml b/ruoyi-common/ruoyi-common-core/pom.xml
index 0db4dce..69c0ef4 100644
--- a/ruoyi-common/ruoyi-common-core/pom.xml
+++ b/ruoyi-common/ruoyi-common-core/pom.xml
@@ -144,10 +144,6 @@
-
- com.github.pagehelper
- pagehelper-spring-boot-starter
-
com.github.pagehelper
pagehelper
diff --git a/ruoyi-common/ruoyi-common-job/pom.xml b/ruoyi-common/ruoyi-common-job/pom.xml
new file mode 100644
index 0000000..ed83291
--- /dev/null
+++ b/ruoyi-common/ruoyi-common-job/pom.xml
@@ -0,0 +1,46 @@
+
+
+
+ com.ruoyi
+ ruoyi-common
+ ${revision}
+
+ 4.0.0
+
+ ruoyi-common-job
+
+
+ ruoyi-common-job 定时任务
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-autoconfigure
+
+
+
+
+ tech.powerjob
+ powerjob-worker-spring-boot-starter
+
+
+ tech.powerjob
+ powerjob-official-processors
+
+
+
+ org.projectlombok
+ lombok
+
+
+
+ com.ruoyi
+ ruoyi-common-core
+
+
+
+
diff --git a/ruoyi-common/ruoyi-common-job/src/main/java/com/ruoyi/common/job/config/PowerJobConfig.java b/ruoyi-common/ruoyi-common-job/src/main/java/com/ruoyi/common/job/config/PowerJobConfig.java
new file mode 100644
index 0000000..3e4be44
--- /dev/null
+++ b/ruoyi-common/ruoyi-common-job/src/main/java/com/ruoyi/common/job/config/PowerJobConfig.java
@@ -0,0 +1,21 @@
+package com.ruoyi.common.job.config;
+
+import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import tech.powerjob.worker.PowerJobWorker;
+
+/**
+ * 启动定时任务
+ * @author yhan219
+ * @since 2023/6/2
+ */
+@Configuration
+@ConditionalOnBean(PowerJobWorker.class)
+@ConditionalOnProperty(prefix = "powerjob.worker", name = "enabled", havingValue = "true")
+@EnableScheduling
+public class PowerJobConfig {
+
+
+}
diff --git a/ruoyi-common/ruoyi-common-orm/src/main/java/com/ruoyi/common/orm/config/MyBatisFlexConfig.java b/ruoyi-common/ruoyi-common-orm/src/main/java/com/ruoyi/common/orm/config/MyBatisFlexConfig.java
index 39ecee2..df534de 100644
--- a/ruoyi-common/ruoyi-common-orm/src/main/java/com/ruoyi/common/orm/config/MyBatisFlexConfig.java
+++ b/ruoyi-common/ruoyi-common-orm/src/main/java/com/ruoyi/common/orm/config/MyBatisFlexConfig.java
@@ -58,61 +58,4 @@ public class MyBatisFlexConfig {
//TODO:多租户配置
-
-
-// @Bean
-// public MybatisPlusInterceptor mybatisPlusInterceptor() {
-// MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
-// // 数据权限处理
-// interceptor.addInnerInterceptor(dataPermissionInterceptor());
-// // 分页插件
-// interceptor.addInnerInterceptor(paginationInnerInterceptor());
-// // 乐观锁插件
-// interceptor.addInnerInterceptor(optimisticLockerInnerInterceptor());
-// return interceptor;
-// }
-//
-// /**
-// * 数据权限拦截器
-// */
-// public PlusDataPermissionInterceptor dataPermissionInterceptor() {
-// return new PlusDataPermissionInterceptor();
-// }
-//
-// /**
-// * 分页插件,自动识别数据库类型
-// */
-// public PaginationInnerInterceptor paginationInnerInterceptor() {
-// PaginationInnerInterceptor paginationInnerInterceptor = new PaginationInnerInterceptor();
-// // 设置最大单页限制数量,默认 500 条,-1 不受限制
-// paginationInnerInterceptor.setMaxLimit(-1L);
-// // 分页合理化
-// paginationInnerInterceptor.setOverflow(true);
-// return paginationInnerInterceptor;
-// }
-//
-// /**
-// * 乐观锁插件
-// */
-// public OptimisticLockerInnerInterceptor optimisticLockerInnerInterceptor() {
-// return new OptimisticLockerInnerInterceptor();
-// }
-//
-// /**
-// * 元对象字段填充控制器
-// */
-// @Bean
-// public MetaObjectHandler metaObjectHandler() {
-// return new InjectionMetaObjectHandler();
-// }
-//
-// /**
-// * 使用网卡信息绑定雪花生成器
-// * 防止集群雪花ID重复
-// */
-// @Bean
-// public IdentifierGenerator idGenerator() {
-// return new DefaultIdentifierGenerator(NetUtil.getLocalhost());
-// }
-
}
diff --git a/ruoyi-common/ruoyi-common-orm/src/main/java/com/ruoyi/common/orm/config/MyBatisFlexInitConfig.java b/ruoyi-common/ruoyi-common-orm/src/main/java/com/ruoyi/common/orm/config/MyBatisFlexInitConfig.java
index 7a56fd6..67db827 100644
--- a/ruoyi-common/ruoyi-common-orm/src/main/java/com/ruoyi/common/orm/config/MyBatisFlexInitConfig.java
+++ b/ruoyi-common/ruoyi-common-orm/src/main/java/com/ruoyi/common/orm/config/MyBatisFlexInitConfig.java
@@ -22,7 +22,7 @@ public class MyBatisFlexInitConfig implements MyBatisFlexCustomizer {
FlexGlobalConfig.KeyConfig keyConfig = new FlexGlobalConfig.KeyConfig();
keyConfig.setKeyType(KeyType.Generator);
keyConfig.setValue(KeyGenerators.snowFlakeId);
- //keyConfig.setBefore(true);
+ keyConfig.setBefore(true);
globalConfig.setKeyConfig(keyConfig);
}
}
diff --git a/ruoyi-extra/pom.xml b/ruoyi-extra/pom.xml
index 71ae759..be17f77 100644
--- a/ruoyi-extra/pom.xml
+++ b/ruoyi-extra/pom.xml
@@ -14,6 +14,7 @@
ruoyi-monitor
+ ruoyi-powerjob-server
diff --git a/ruoyi-extra/ruoyi-monitor/Dockerfile b/ruoyi-extra/ruoyi-monitor/Dockerfile
deleted file mode 100644
index c506f1b..0000000
--- a/ruoyi-extra/ruoyi-monitor/Dockerfile
+++ /dev/null
@@ -1,13 +0,0 @@
-FROM findepi/graalvm:java17-native
-
-MAINTAINER Lion Li
-
-RUN mkdir -p /ruoyi/monitor/logs
-
-WORKDIR /ruoyi/monitor
-
-EXPOSE 9090
-
-ADD ./target/ruoyi-monitor-admin.jar ./app.jar
-
-ENTRYPOINT ["java", "-Djava.security.egd=file:/dev/./urandom", "-jar", "app.jar"]
diff --git a/ruoyi-extra/ruoyi-monitor/pom.xml b/ruoyi-extra/ruoyi-monitor/pom.xml
index 4024aa1..2399602 100644
--- a/ruoyi-extra/ruoyi-monitor/pom.xml
+++ b/ruoyi-extra/ruoyi-monitor/pom.xml
@@ -63,9 +63,6 @@
org.springframework.boot
spring-boot-maven-plugin
${spring-boot.version}
-
-
-
diff --git a/ruoyi-extra/ruoyi-powerjob-server/pom.xml b/ruoyi-extra/ruoyi-powerjob-server/pom.xml
new file mode 100644
index 0000000..8038c63
--- /dev/null
+++ b/ruoyi-extra/ruoyi-powerjob-server/pom.xml
@@ -0,0 +1,76 @@
+
+
+ com.ruoyi
+ ruoyi-extra
+ ${revision}
+
+ 4.0.0
+ jar
+ ruoyi-powerjob-server
+
+
+ 2.7.14
+ 2.7.10
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-parent
+ ${spring-boot.version}
+ pom
+ import
+
+
+
+
+
+
+
+
+ tech.powerjob
+ powerjob-server-starter
+ ${powerjob.version}
+
+
+
+ de.codecentric
+ spring-boot-admin-starter-client
+ ${spring-boot-admin.version}
+
+
+
+ org.projectlombok
+ lombok
+
+
+
+
+ jakarta.servlet
+ jakarta.servlet-api
+ 4.0.4
+
+
+
+
+
+ ${project.artifactId}
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+ ${spring-boot.version}
+
+
+
+ repackage
+
+
+
+
+
+
+
+
diff --git a/ruoyi-extra/ruoyi-powerjob-server/src/main/java/com/ruoyi/powerjob/PowerJobServerApplication.java b/ruoyi-extra/ruoyi-powerjob-server/src/main/java/com/ruoyi/powerjob/PowerJobServerApplication.java
new file mode 100644
index 0000000..345c34d
--- /dev/null
+++ b/ruoyi-extra/ruoyi-powerjob-server/src/main/java/com/ruoyi/powerjob/PowerJobServerApplication.java
@@ -0,0 +1,25 @@
+package com.ruoyi.powerjob;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import tech.powerjob.server.common.utils.PropertyUtils;
+
+/**
+ * powerjob 启动程序
+ *
+ * @author yhan219
+ */
+@Slf4j
+@EnableScheduling
+@SpringBootApplication(scanBasePackages = "tech.powerjob.server")
+public class PowerJobServerApplication {
+
+ public static void main(String[] args) {
+ PropertyUtils.init();
+ SpringApplication.run(tech.powerjob.server.PowerJobServerApplication.class, args);
+ log.info("文档地址: https://www.yuque.com/powerjob/guidence/problem");
+ }
+
+}
diff --git a/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application-dev.properties b/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application-dev.properties
new file mode 100644
index 0000000..5c33798
--- /dev/null
+++ b/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application-dev.properties
@@ -0,0 +1,50 @@
+oms.env=dev
+
+####### Database properties(Configure according to the environment) #######
+spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
+spring.datasource.core.jdbc-url=jdbc:mysql://localhost:3306/ry-vue?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
+spring.datasource.core.username=root
+spring.datasource.core.password=Root@369
+spring.datasource.core.maximum-pool-size=20
+spring.datasource.core.minimum-idle=5
+
+# 监控配置
+# 客户端开关
+spring.boot.admin.client.enabled=true
+# 设置 Spring Boot Admin Server 地址
+spring.boot.admin.client.url: http://localhost:9090/admin
+spring.boot.admin.client.instance.service-host-type=IP
+spring.boot.admin.client.username=ruoyi
+spring.boot.admin.client.password=123456
+
+####### MongoDB properties(Non-core configuration properties) #######
+####### delete mongodb config to disable mongodb #######
+oms.mongodb.enable=false
+#spring.data.mongodb.uri=mongodb+srv://zqq:No1Bug2Please3!@cluster0.wie54.gcp.mongodb.net/powerjob_daily?retryWrites=true&w=majority
+
+####### Email properties(Non-core configuration properties) #######
+####### Delete the following code to disable the mail #######
+#spring.mail.host=smtp.163.com
+#spring.mail.username=zqq@163.com
+#spring.mail.password=GOFZPNARMVKCGONV
+#spring.mail.properties.mail.smtp.auth=true
+#spring.mail.properties.mail.smtp.starttls.enable=true
+#spring.mail.properties.mail.smtp.starttls.required=true
+
+####### DingTalk properties(Non-core configuration properties) #######
+####### Delete the following code to disable the DingTalk #######
+#oms.alarm.ding.app-key=dingauqwkvxxnqskknfv
+#oms.alarm.ding.app-secret=XWrEPdAZMPgJeFtHuL0LH73LRj-74umF2_0BFcoXMfvnX0pCQvt0rpb1JOJU_HLl
+#oms.alarm.ding.agent-id=847044348
+
+####### Resource cleaning properties #######
+oms.instanceinfo.retention=1
+oms.container.retention.local=1
+oms.container.retention.remote=-1
+
+####### Cache properties #######
+oms.instance.metadata.cache.size=1024
+
+####### Threshold in precise fetching server(0~100). 100 means full detection of server, in which #######
+####### split-brain could be avoided while performance overhead would increase. #######
+oms.accurate.select.server.percentage = 50
diff --git a/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application-prod.properties b/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application-prod.properties
new file mode 100644
index 0000000..7d6a1cc
--- /dev/null
+++ b/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application-prod.properties
@@ -0,0 +1,50 @@
+oms.env=prod
+
+####### Database properties(Configure according to the environment) #######
+spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
+spring.datasource.core.jdbc-url=jdbc:mysql://localhost:3306/ry-vue?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
+spring.datasource.core.username=root
+spring.datasource.core.password=Root@369
+spring.datasource.core.maximum-pool-size=20
+spring.datasource.core.minimum-idle=5
+
+# 监控配置
+# 客户端开关
+spring.boot.admin.client.enabled=true
+# 设置 Spring Boot Admin Server 地址
+spring.boot.admin.client.url: http://localhost:9090/admin
+spring.boot.admin.client.instance.service-host-type=IP
+spring.boot.admin.client.username=ruoyi
+spring.boot.admin.client.password=123456
+
+####### MongoDB properties(Non-core configuration properties) #######
+####### delete mongodb config to disable mongodb #######
+oms.mongodb.enable=false
+#spring.data.mongodb.uri=mongodb+srv://zqq:No1Bug2Please3!@cluster0.wie54.gcp.mongodb.net/powerjob_daily?retryWrites=true&w=majority
+
+####### Email properties(Non-core configuration properties) #######
+####### Delete the following code to disable the mail #######
+#spring.mail.host=smtp.163.com
+#spring.mail.username=zqq@163.com
+#spring.mail.password=GOFZPNARMVKCGONV
+#spring.mail.properties.mail.smtp.auth=true
+#spring.mail.properties.mail.smtp.starttls.enable=true
+#spring.mail.properties.mail.smtp.starttls.required=true
+
+####### DingTalk properties(Non-core configuration properties) #######
+####### Delete the following code to disable the DingTalk #######
+#oms.alarm.ding.app-key=dingauqwkvxxnqskknfv
+#oms.alarm.ding.app-secret=XWrEPdAZMPgJeFtHuL0LH73LRj-74umF2_0BFcoXMfvnX0pCQvt0rpb1JOJU_HLl
+#oms.alarm.ding.agent-id=847044348
+
+####### Resource cleaning properties #######
+oms.instanceinfo.retention=7
+oms.container.retention.local=7
+oms.container.retention.remote=-1
+
+####### Cache properties #######
+oms.instance.metadata.cache.size=2048
+
+####### Threshold in precise fetching server(0~100). 100 means full detection of server, in which #######
+####### split-brain could be avoided while performance overhead would increase. #######
+oms.accurate.select.server.percentage = 50
diff --git a/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application.properties b/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application.properties
new file mode 100644
index 0000000..5735473
--- /dev/null
+++ b/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/application.properties
@@ -0,0 +1,33 @@
+# Http server port
+server.port=7700
+
+spring.profiles.active=@profiles.active@
+spring.main.banner-mode=log
+spring.jpa.open-in-view=false
+spring.data.mongodb.repositories.type=none
+logging.level.org.mongodb=warn
+logging.level.tech.powerjob.server=warn
+logging.level.MONITOR_LOGGER_DB_OPERATION=warn
+logging.level.MONITOR_LOGGER_WORKER_HEART_BEAT=warn
+logging.config: classpath:logback.xml
+
+# Configuration for uploading files.
+spring.servlet.multipart.enabled=true
+spring.servlet.multipart.file-size-threshold=0
+spring.servlet.multipart.max-file-size=209715200
+spring.servlet.multipart.max-request-size=209715200
+
+###### PowerJob transporter configuration ######
+oms.transporter.active.protocols=AKKA,HTTP
+oms.transporter.main.protocol=HTTP
+oms.akka.port=10086
+oms.http.port=10010
+# Prefix for all tables. Default empty string. Config if you have needs, i.e. pj_
+oms.table-prefix=pj_
+
+# Actuator 监控端点的配置项
+spring.application.name: ruoyi-powerjob-server
+management.endpoints.web.exposure.include=*
+management.endpoint.health.show-details=ALWAYS
+management.endpoint.logfile.external-file=./logs/ruoyi-powerjob-server.log
+management.health.mongo.enabled=${oms.mongodb.enable}
diff --git a/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/logback.xml b/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/logback.xml
new file mode 100644
index 0000000..ad3bfa9
--- /dev/null
+++ b/ruoyi-extra/ruoyi-powerjob-server/src/main/resources/logback.xml
@@ -0,0 +1,34 @@
+
+
+
+ logback
+
+
+
+
+
+
+ ${console.log.pattern}
+ utf-8
+
+
+
+
+ ${log.path}.log
+
+ ${log.path}.%d{yyyy-MM-dd}.log
+
+ 60
+
+
+ ${log.pattern}
+
+
+
+
+
+
+
+
+
diff --git a/ruoyi-modules/pom.xml b/ruoyi-modules/pom.xml
index 8cdb302..a6a6a82 100644
--- a/ruoyi-modules/pom.xml
+++ b/ruoyi-modules/pom.xml
@@ -13,7 +13,8 @@
ruoyi-demo
ruoyi-generator
- ruoyi-quartz
+
+ ruoyi-job
ruoyi-system
diff --git a/ruoyi-modules/ruoyi-job/pom.xml b/ruoyi-modules/ruoyi-job/pom.xml
new file mode 100644
index 0000000..9869873
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/pom.xml
@@ -0,0 +1,35 @@
+
+
+
+ com.ruoyi
+ ruoyi-modules
+ ${revision}
+
+ 4.0.0
+ jar
+ ruoyi-job
+
+
+ 任务调度
+
+
+
+
+
+
+ com.ruoyi
+ ruoyi-common-json
+
+
+
+
+ com.ruoyi
+ ruoyi-common-job
+
+
+
+
+
+
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/package-info.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/package-info.java
new file mode 100644
index 0000000..d528032
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/package-info.java
@@ -0,0 +1,6 @@
+/**
+ * 演示用例
+ * copy from https://github.com/PowerJob/PowerJob/tree/master/powerjob-worker-samples/src/main/java/tech/powerjob/samples
+ */
+
+package com.ruoyi.job;
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/BroadcastProcessorDemo.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/BroadcastProcessorDemo.java
new file mode 100644
index 0000000..4b6f257
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/BroadcastProcessorDemo.java
@@ -0,0 +1,56 @@
+package com.ruoyi.job.processors;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Component;
+import tech.powerjob.common.utils.NetUtils;
+import tech.powerjob.worker.core.processor.ProcessResult;
+import tech.powerjob.worker.core.processor.TaskContext;
+import tech.powerjob.worker.core.processor.TaskResult;
+import tech.powerjob.worker.core.processor.sdk.BroadcastProcessor;
+import tech.powerjob.worker.log.OmsLogger;
+
+import java.util.List;
+
+/**
+ * 广播处理器 示例
+ *
+ * @author tjq
+ * @since 2020/4/17
+ */
+@Slf4j
+@Component
+public class BroadcastProcessorDemo implements BroadcastProcessor {
+
+ @Override
+ public ProcessResult preProcess(TaskContext context) {
+ System.out.println("===== BroadcastProcessorDemo#preProcess ======");
+ context.getOmsLogger().info("BroadcastProcessorDemo#preProcess, current host: {}", NetUtils.getLocalHost());
+ if ("rootFailed".equals(context.getJobParams())) {
+ return new ProcessResult(false, "console need failed");
+ } else {
+ return new ProcessResult(true);
+ }
+ }
+
+ @Override
+ public ProcessResult process(TaskContext taskContext) throws Exception {
+ OmsLogger logger = taskContext.getOmsLogger();
+ System.out.println("===== BroadcastProcessorDemo#process ======");
+ logger.info("BroadcastProcessorDemo#process, current host: {}", NetUtils.getLocalHost());
+ long sleepTime = 1000;
+ try {
+ sleepTime = Long.parseLong(taskContext.getJobParams());
+ } catch (Exception e) {
+ logger.warn("[BroadcastProcessor] parse sleep time failed!", e);
+ }
+ Thread.sleep(Math.max(sleepTime, 1000));
+ return new ProcessResult(true);
+ }
+
+ @Override
+ public ProcessResult postProcess(TaskContext context, List taskResults) {
+ System.out.println("===== BroadcastProcessorDemo#postProcess ======");
+ context.getOmsLogger().info("BroadcastProcessorDemo#postProcess, current host: {}, taskResult: {}", NetUtils.getLocalHost(), taskResults);
+ return new ProcessResult(true, "success");
+ }
+}
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/LogTestProcessor.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/LogTestProcessor.java
new file mode 100644
index 0000000..e9c3966
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/LogTestProcessor.java
@@ -0,0 +1,41 @@
+package com.ruoyi.job.processors;
+
+import com.alibaba.fastjson.JSONObject;
+import org.springframework.stereotype.Component;
+import tech.powerjob.official.processors.util.CommonUtils;
+import tech.powerjob.worker.core.processor.ProcessResult;
+import tech.powerjob.worker.core.processor.TaskContext;
+import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
+import tech.powerjob.worker.log.OmsLogger;
+
+import java.util.Date;
+import java.util.Optional;
+
+/**
+ * LogTestProcessor
+ *
+ * @author tjq
+ * @since 2022/9/18
+ */
+@Component
+public class LogTestProcessor implements BasicProcessor {
+
+ @Override
+ public ProcessResult process(TaskContext context) throws Exception {
+
+ final OmsLogger omsLogger = context.getOmsLogger();
+ final String parseParams = CommonUtils.parseParams(context);
+ final JSONObject config = Optional.ofNullable(JSONObject.parseObject(parseParams)).orElse(new JSONObject());
+
+ final long loopTimes = Optional.ofNullable(config.getLong("loopTimes")).orElse(1000L);
+
+ for (int i = 0; i < loopTimes; i++) {
+ omsLogger.debug("[DEBUG] one DEBUG log in {}", new Date());
+ omsLogger.info("[INFO] one INFO log in {}", new Date());
+ omsLogger.warn("[WARN] one WARN log in {}", new Date());
+ omsLogger.error("[ERROR] one ERROR log in {}", new Date());
+ }
+
+ return new ProcessResult(true);
+ }
+}
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/MapProcessorDemo.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/MapProcessorDemo.java
new file mode 100644
index 0000000..ad2ef2a
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/MapProcessorDemo.java
@@ -0,0 +1,93 @@
+package com.ruoyi.job.processors;
+
+import com.google.common.collect.Lists;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import com.ruoyi.common.json.utils.JsonUtils;
+import org.springframework.stereotype.Component;
+import tech.powerjob.worker.core.processor.ProcessResult;
+import tech.powerjob.worker.core.processor.TaskContext;
+import tech.powerjob.worker.core.processor.sdk.MapProcessor;
+
+import java.util.List;
+import java.util.concurrent.ThreadLocalRandom;
+
+/**
+ * Map处理器 示例
+ *
+ * @author tjq
+ * @since 2020/4/18
+ */
+@Component
+public class MapProcessorDemo implements MapProcessor {
+
+
+ /**
+ * 每一批发送任务大小
+ */
+ private static final int BATCH_SIZE = 100;
+ /**
+ * 发送的批次
+ */
+ private static final int BATCH_NUM = 5;
+
+ @Override
+ public ProcessResult process(TaskContext context) throws Exception {
+
+ log.info("============== MapProcessorDemo#process ==============");
+ log.info("isRootTask:{}", isRootTask());
+ log.info("taskContext:{}", JsonUtils.toJsonString(context));
+
+ if (isRootTask()) {
+ log.info("==== MAP ====");
+ List subTasks = Lists.newLinkedList();
+ for (int j = 0; j < BATCH_NUM; j++) {
+ SubTask subTask = new SubTask();
+ subTask.siteId = j;
+ subTask.itemIds = Lists.newLinkedList();
+ subTasks.add(subTask);
+ for (int i = 0; i < BATCH_SIZE; i++) {
+ subTask.itemIds.add(i + j * 100);
+ }
+ }
+ map(subTasks, "MAP_TEST_TASK");
+ return new ProcessResult(true, "map successfully");
+ } else {
+
+ log.info("==== PROCESS ====");
+ SubTask subTask = (SubTask) context.getSubTask();
+ for (Integer itemId : subTask.getItemIds()) {
+ if (Thread.interrupted()) {
+ // 任务被中断
+ log.info("job has been stop! so stop to process subTask: {} => {}", subTask.getSiteId(), itemId);
+ break;
+ }
+ log.info("processing subTask: {} => {}", subTask.getSiteId(), itemId);
+ int max = Integer.MAX_VALUE >> 7;
+ for (int i = 0; ; i++) {
+ // 模拟耗时操作
+ if (i > max) {
+ break;
+ }
+ }
+ }
+ // 测试在 Map 任务中追加上下文
+ context.getWorkflowContext().appendData2WfContext("Yasuo", "A sword's poor company for a long road.");
+ boolean b = ThreadLocalRandom.current().nextBoolean();
+ if (context.getCurrentRetryTimes() >= 1) {
+ // 重试的话一定会成功
+ b = true;
+ }
+ return new ProcessResult(b, "RESULT:" + b);
+ }
+ }
+
+ @Getter
+ @NoArgsConstructor
+ @AllArgsConstructor
+ public static class SubTask {
+ private Integer siteId;
+ private List itemIds;
+ }
+}
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/MapReduceProcessorDemo.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/MapReduceProcessorDemo.java
new file mode 100644
index 0000000..f6faf2c
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/MapReduceProcessorDemo.java
@@ -0,0 +1,93 @@
+package com.ruoyi.job.processors;
+
+import cn.hutool.core.lang.Dict;
+import com.google.common.collect.Lists;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.ToString;
+import lombok.extern.slf4j.Slf4j;
+import com.ruoyi.common.json.utils.JsonUtils;
+import org.springframework.stereotype.Component;
+import tech.powerjob.worker.core.processor.ProcessResult;
+import tech.powerjob.worker.core.processor.TaskContext;
+import tech.powerjob.worker.core.processor.TaskResult;
+import tech.powerjob.worker.core.processor.sdk.MapReduceProcessor;
+import tech.powerjob.worker.log.OmsLogger;
+
+import java.util.List;
+import java.util.concurrent.ThreadLocalRandom;
+
+/**
+ * MapReduce 处理器示例
+ * 控制台参数:{"batchSize": 100, "batchNum": 2}
+ *
+ * @author tjq
+ * @since 2020/4/17
+ */
+@Slf4j
+@Component
+public class MapReduceProcessorDemo implements MapReduceProcessor {
+
+ @Override
+ public ProcessResult process(TaskContext context) throws Exception {
+
+ OmsLogger omsLogger = context.getOmsLogger();
+
+ log.info("============== TestMapReduceProcessor#process ==============");
+ log.info("isRootTask:{}", isRootTask());
+ log.info("taskContext:{}", JsonUtils.toJsonString(context));
+
+ // 根据控制台参数获取MR批次及子任务大小
+ final Dict jobParams = JsonUtils.parseMap(context.getJobParams());
+
+ Integer batchSize = (Integer) jobParams.getOrDefault("batchSize", 100);
+ Integer batchNum = (Integer) jobParams.getOrDefault("batchNum", 10);
+
+ if (isRootTask()) {
+ log.info("==== MAP ====");
+ omsLogger.info("[DemoMRProcessor] start root task~");
+ List subTasks = Lists.newLinkedList();
+ for (int j = 0; j < batchNum; j++) {
+ for (int i = 0; i < batchSize; i++) {
+ int x = j * batchSize + i;
+ subTasks.add(new TestSubTask("name" + x, x));
+ }
+ map(subTasks, "MAP_TEST_TASK");
+ subTasks.clear();
+ }
+ omsLogger.info("[DemoMRProcessor] map success~");
+ return new ProcessResult(true, "MAP_SUCCESS");
+ } else {
+ log.info("==== NORMAL_PROCESS ====");
+ omsLogger.info("[DemoMRProcessor] process subTask: {}.", JsonUtils.toJsonString(context.getSubTask()));
+ log.info("subTask: {}", JsonUtils.toJsonString(context.getSubTask()));
+ Thread.sleep(1000);
+ if (context.getCurrentRetryTimes() == 0) {
+ return new ProcessResult(false, "FIRST_FAILED");
+ } else {
+ return new ProcessResult(true, "PROCESS_SUCCESS");
+ }
+ }
+ }
+
+ @Override
+ public ProcessResult reduce(TaskContext context, List taskResults) {
+ log.info("================ MapReduceProcessorDemo#reduce ================");
+ log.info("TaskContext: {}", JsonUtils.toJsonString(context));
+ log.info("List: {}", JsonUtils.toJsonString(taskResults));
+ context.getOmsLogger().info("MapReduce job finished, result is {}.", taskResults);
+
+ boolean success = ThreadLocalRandom.current().nextBoolean();
+ return new ProcessResult(success, context + ": " + success);
+ }
+
+ @Getter
+ @ToString
+ @NoArgsConstructor
+ @AllArgsConstructor
+ public static class TestSubTask {
+ private String name;
+ private int age;
+ }
+}
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/SimpleProcessor.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/SimpleProcessor.java
new file mode 100644
index 0000000..1d75685
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/SimpleProcessor.java
@@ -0,0 +1,35 @@
+package com.ruoyi.job.processors;
+
+import org.springframework.stereotype.Component;
+import tech.powerjob.worker.core.processor.ProcessResult;
+import tech.powerjob.worker.core.processor.TaskContext;
+import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
+import tech.powerjob.worker.log.OmsLogger;
+
+import java.util.Optional;
+
+/**
+ * @author Echo009
+ * @since 2022/4/27
+ */
+@Component
+public class SimpleProcessor implements BasicProcessor {
+
+ @Override
+ public ProcessResult process(TaskContext context) throws Exception {
+
+ OmsLogger logger = context.getOmsLogger();
+
+ String jobParams = Optional.ofNullable(context.getJobParams()).orElse("S");
+ logger.info("Current context:{}", context.getWorkflowContext());
+ logger.info("Current job params:{}", jobParams);
+
+ // 测试中文问题 #581
+ if (jobParams.contains("CN")) {
+ return new ProcessResult(true, "任务成功啦!!!");
+ }
+
+ return jobParams.contains("F") ? new ProcessResult(false) : new ProcessResult(true, "yeah!");
+
+ }
+}
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/StandaloneProcessorDemo.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/StandaloneProcessorDemo.java
new file mode 100644
index 0000000..7a88695
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/StandaloneProcessorDemo.java
@@ -0,0 +1,51 @@
+package com.ruoyi.job.processors;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.stereotype.Component;
+import tech.powerjob.worker.core.processor.ProcessResult;
+import tech.powerjob.worker.core.processor.TaskContext;
+import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
+import tech.powerjob.worker.log.OmsLogger;
+
+import java.util.Collections;
+
+/**
+ * 单机处理器 示例
+ *
+ * @author tjq
+ * @since 2020/4/17
+ */
+@Slf4j
+@Component
+public class StandaloneProcessorDemo implements BasicProcessor {
+
+ @Override
+ public ProcessResult process(TaskContext context) throws Exception {
+ OmsLogger omsLogger = context.getOmsLogger();
+ omsLogger.info("StandaloneProcessorDemo start process,context is {}.", context);
+ omsLogger.info("Notice! If you want this job process failed, your jobParams need to be 'failed'");
+ omsLogger.info("Let's test the exception~");
+ // 测试异常日志
+ try {
+ Collections.emptyList().add("277");
+ } catch (Exception e) {
+ omsLogger.error("oh~it seems that we have an exception~", e);
+ }
+ log.info("================ StandaloneProcessorDemo#process ================");
+ log.info("jobParam:{}", context.getJobParams());
+ log.info("instanceParams:{}", context.getInstanceParams());
+ String param;
+ // 解析参数,非处于工作流中时,优先取实例参数(允许动态[instanceParams]覆盖静态参数[jobParams])
+ if (context.getWorkflowContext() == null) {
+ param = StringUtils.isBlank(context.getInstanceParams()) ? context.getJobParams() : context.getInstanceParams();
+ } else {
+ param = context.getJobParams();
+ }
+ // 根据参数判断是否成功
+ boolean success = !"failed".equals(param);
+ omsLogger.info("StandaloneProcessorDemo finished process,success: {}", success);
+ omsLogger.info("anyway, we finished the job successfully~Congratulations!");
+ return new ProcessResult(success, context + ": " + success);
+ }
+}
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/TimeoutProcessor.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/TimeoutProcessor.java
new file mode 100644
index 0000000..703f9cf
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/processors/TimeoutProcessor.java
@@ -0,0 +1,25 @@
+package com.ruoyi.job.processors;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Component;
+import tech.powerjob.worker.core.processor.ProcessResult;
+import tech.powerjob.worker.core.processor.TaskContext;
+import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
+
+/**
+ * 测试超时任务(可中断)
+ *
+ * @author tjq
+ * @since 2020/4/20
+ */
+@Component
+@Slf4j
+public class TimeoutProcessor implements BasicProcessor {
+ @Override
+ public ProcessResult process(TaskContext context) throws Exception {
+ long sleepTime = Long.parseLong(context.getJobParams());
+ log.info("TaskInstance({}) will sleep {} ms", context.getInstanceId(), sleepTime);
+ Thread.sleep(sleepTime);
+ return new ProcessResult(true, "impossible~~~~QAQ~");
+ }
+}
diff --git a/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/workflow/WorkflowStandaloneProcessor.java b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/workflow/WorkflowStandaloneProcessor.java
new file mode 100644
index 0000000..ad77eef
--- /dev/null
+++ b/ruoyi-modules/ruoyi-job/src/main/java/com/ruoyi/job/workflow/WorkflowStandaloneProcessor.java
@@ -0,0 +1,36 @@
+package com.ruoyi.job.workflow;
+
+import com.alibaba.fastjson.JSON;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Component;
+import tech.powerjob.worker.core.processor.ProcessResult;
+import tech.powerjob.worker.core.processor.TaskContext;
+import tech.powerjob.worker.core.processor.sdk.BasicProcessor;
+import tech.powerjob.worker.log.OmsLogger;
+
+import java.util.Map;
+
+/**
+ * 工作流测试
+ *
+ * @author tjq
+ * @since 2020/6/2
+ */
+@Component
+@Slf4j
+public class WorkflowStandaloneProcessor implements BasicProcessor {
+
+ @Override
+ public ProcessResult process(TaskContext context) throws Exception {
+ OmsLogger logger = context.getOmsLogger();
+ logger.info("current jobParams: {}", context.getJobParams());
+ logger.info("current context: {}", context.getWorkflowContext());
+ log.info("jobParams:{}", context.getJobParams());
+ log.info("currentContext:{}", JSON.toJSONString(context));
+
+ // 尝试获取上游任务
+ Map workflowContext = context.getWorkflowContext().fetchWorkflowContext();
+ log.info("工作流上下文数据:{}", workflowContext);
+ return new ProcessResult(true, context.getJobId() + " process successfully.");
+ }
+}
diff --git a/ruoyi-ui/.env.development b/ruoyi-ui/.env.development
index 2f2426c..e1f0eba 100644
--- a/ruoyi-ui/.env.development
+++ b/ruoyi-ui/.env.development
@@ -10,5 +10,8 @@ VUE_APP_BASE_API = '/dev-api'
# 监控地址
VUE_APP_MONITRO_ADMIN = 'http://localhost:9090/admin/applications'
+# powerjob任务调度控制台地址
+VUE_APP_POWERJOB_ADMIN = 'http://localhost:7700/'
+
# 路由懒加载
VUE_CLI_BABEL_TRANSPILE_MODULES = true
diff --git a/ruoyi-ui/.env.production b/ruoyi-ui/.env.production
index e9abac0..008b527 100644
--- a/ruoyi-ui/.env.production
+++ b/ruoyi-ui/.env.production
@@ -9,3 +9,6 @@ VUE_APP_BASE_API = '/prod-api'
# 监控地址
VUE_APP_MONITRO_ADMIN = 'http://localhost:9090/admin/applications'
+
+# powerjob任务调度控制台地址
+VUE_APP_POWERJOB_ADMIN = 'http://localhost:7700/'
diff --git a/ruoyi-ui/src/views/monitor/powerjob/index.vue b/ruoyi-ui/src/views/monitor/powerjob/index.vue
new file mode 100644
index 0000000..7561413
--- /dev/null
+++ b/ruoyi-ui/src/views/monitor/powerjob/index.vue
@@ -0,0 +1,22 @@
+
+
+
+
+
+
+
diff --git a/sql/quartz.sql b/sql/quartz.sql
deleted file mode 100644
index cee613b..0000000
--- a/sql/quartz.sql
+++ /dev/null
@@ -1,174 +0,0 @@
-DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS;
-DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS;
-DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE;
-DROP TABLE IF EXISTS QRTZ_LOCKS;
-DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS;
-DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS;
-DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS;
-DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS;
-DROP TABLE IF EXISTS QRTZ_TRIGGERS;
-DROP TABLE IF EXISTS QRTZ_JOB_DETAILS;
-DROP TABLE IF EXISTS QRTZ_CALENDARS;
-
--- ----------------------------
--- 1、存储每一个已配置的 jobDetail 的详细信息
--- ----------------------------
-create table QRTZ_JOB_DETAILS (
- sched_name varchar(120) not null comment '调度名称',
- job_name varchar(200) not null comment '任务名称',
- job_group varchar(200) not null comment '任务组名',
- description varchar(250) null comment '相关介绍',
- job_class_name varchar(250) not null comment '执行任务类名称',
- is_durable varchar(1) not null comment '是否持久化',
- is_nonconcurrent varchar(1) not null comment '是否并发',
- is_update_data varchar(1) not null comment '是否更新数据',
- requests_recovery varchar(1) not null comment '是否接受恢复执行',
- job_data blob null comment '存放持久化job对象',
- primary key (sched_name, job_name, job_group)
-) engine=innodb comment = '任务详细信息表';
-
--- ----------------------------
--- 2、 存储已配置的 Trigger 的信息
--- ----------------------------
-create table QRTZ_TRIGGERS (
- sched_name varchar(120) not null comment '调度名称',
- trigger_name varchar(200) not null comment '触发器的名字',
- trigger_group varchar(200) not null comment '触发器所属组的名字',
- job_name varchar(200) not null comment 'qrtz_job_details表job_name的外键',
- job_group varchar(200) not null comment 'qrtz_job_details表job_group的外键',
- description varchar(250) null comment '相关介绍',
- next_fire_time bigint(13) null comment '上一次触发时间(毫秒)',
- prev_fire_time bigint(13) null comment '下一次触发时间(默认为-1表示不触发)',
- priority integer null comment '优先级',
- trigger_state varchar(16) not null comment '触发器状态',
- trigger_type varchar(8) not null comment '触发器的类型',
- start_time bigint(13) not null comment '开始时间',
- end_time bigint(13) null comment '结束时间',
- calendar_name varchar(200) null comment '日程表名称',
- misfire_instr smallint(2) null comment '补偿执行的策略',
- job_data blob null comment '存放持久化job对象',
- primary key (sched_name, trigger_name, trigger_group),
- foreign key (sched_name, job_name, job_group) references QRTZ_JOB_DETAILS(sched_name, job_name, job_group)
-) engine=innodb comment = '触发器详细信息表';
-
--- ----------------------------
--- 3、 存储简单的 Trigger,包括重复次数,间隔,以及已触发的次数
--- ----------------------------
-create table QRTZ_SIMPLE_TRIGGERS (
- sched_name varchar(120) not null comment '调度名称',
- trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
- trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
- repeat_count bigint(7) not null comment '重复的次数统计',
- repeat_interval bigint(12) not null comment '重复的间隔时间',
- times_triggered bigint(10) not null comment '已经触发的次数',
- primary key (sched_name, trigger_name, trigger_group),
- foreign key (sched_name, trigger_name, trigger_group) references QRTZ_TRIGGERS(sched_name, trigger_name, trigger_group)
-) engine=innodb comment = '简单触发器的信息表';
-
--- ----------------------------
--- 4、 存储 Cron Trigger,包括 Cron 表达式和时区信息
--- ----------------------------
-create table QRTZ_CRON_TRIGGERS (
- sched_name varchar(120) not null comment '调度名称',
- trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
- trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
- cron_expression varchar(200) not null comment 'cron表达式',
- time_zone_id varchar(80) comment '时区',
- primary key (sched_name, trigger_name, trigger_group),
- foreign key (sched_name, trigger_name, trigger_group) references QRTZ_TRIGGERS(sched_name, trigger_name, trigger_group)
-) engine=innodb comment = 'Cron类型的触发器表';
-
--- ----------------------------
--- 5、 Trigger 作为 Blob 类型存储(用于 Quartz 用户用 JDBC 创建他们自己定制的 Trigger 类型,JobStore 并不知道如何存储实例的时候)
--- ----------------------------
-create table QRTZ_BLOB_TRIGGERS (
- sched_name varchar(120) not null comment '调度名称',
- trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
- trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
- blob_data blob null comment '存放持久化Trigger对象',
- primary key (sched_name, trigger_name, trigger_group),
- foreign key (sched_name, trigger_name, trigger_group) references QRTZ_TRIGGERS(sched_name, trigger_name, trigger_group)
-) engine=innodb comment = 'Blob类型的触发器表';
-
--- ----------------------------
--- 6、 以 Blob 类型存储存放日历信息, quartz可配置一个日历来指定一个时间范围
--- ----------------------------
-create table QRTZ_CALENDARS (
- sched_name varchar(120) not null comment '调度名称',
- calendar_name varchar(200) not null comment '日历名称',
- calendar blob not null comment '存放持久化calendar对象',
- primary key (sched_name, calendar_name)
-) engine=innodb comment = '日历信息表';
-
--- ----------------------------
--- 7、 存储已暂停的 Trigger 组的信息
--- ----------------------------
-create table QRTZ_PAUSED_TRIGGER_GRPS (
- sched_name varchar(120) not null comment '调度名称',
- trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
- primary key (sched_name, trigger_group)
-) engine=innodb comment = '暂停的触发器表';
-
--- ----------------------------
--- 8、 存储与已触发的 Trigger 相关的状态信息,以及相联 Job 的执行信息
--- ----------------------------
-create table QRTZ_FIRED_TRIGGERS (
- sched_name varchar(120) not null comment '调度名称',
- entry_id varchar(95) not null comment '调度器实例id',
- trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
- trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
- instance_name varchar(200) not null comment '调度器实例名',
- fired_time bigint(13) not null comment '触发的时间',
- sched_time bigint(13) not null comment '定时器制定的时间',
- priority integer not null comment '优先级',
- state varchar(16) not null comment '状态',
- job_name varchar(200) null comment '任务名称',
- job_group varchar(200) null comment '任务组名',
- is_nonconcurrent varchar(1) null comment '是否并发',
- requests_recovery varchar(1) null comment '是否接受恢复执行',
- primary key (sched_name, entry_id)
-) engine=innodb comment = '已触发的触发器表';
-
--- ----------------------------
--- 9、 存储少量的有关 Scheduler 的状态信息,假如是用于集群中,可以看到其他的 Scheduler 实例
--- ----------------------------
-create table QRTZ_SCHEDULER_STATE (
- sched_name varchar(120) not null comment '调度名称',
- instance_name varchar(200) not null comment '实例名称',
- last_checkin_time bigint(13) not null comment '上次检查时间',
- checkin_interval bigint(13) not null comment '检查间隔时间',
- primary key (sched_name, instance_name)
-) engine=innodb comment = '调度器状态表';
-
--- ----------------------------
--- 10、 存储程序的悲观锁的信息(假如使用了悲观锁)
--- ----------------------------
-create table QRTZ_LOCKS (
- sched_name varchar(120) not null comment '调度名称',
- lock_name varchar(40) not null comment '悲观锁名称',
- primary key (sched_name, lock_name)
-) engine=innodb comment = '存储的悲观锁信息表';
-
--- ----------------------------
--- 11、 Quartz集群实现同步机制的行锁表
--- ----------------------------
-create table QRTZ_SIMPROP_TRIGGERS (
- sched_name varchar(120) not null comment '调度名称',
- trigger_name varchar(200) not null comment 'qrtz_triggers表trigger_name的外键',
- trigger_group varchar(200) not null comment 'qrtz_triggers表trigger_group的外键',
- str_prop_1 varchar(512) null comment 'String类型的trigger的第一个参数',
- str_prop_2 varchar(512) null comment 'String类型的trigger的第二个参数',
- str_prop_3 varchar(512) null comment 'String类型的trigger的第三个参数',
- int_prop_1 int null comment 'int类型的trigger的第一个参数',
- int_prop_2 int null comment 'int类型的trigger的第二个参数',
- long_prop_1 bigint null comment 'long类型的trigger的第一个参数',
- long_prop_2 bigint null comment 'long类型的trigger的第二个参数',
- dec_prop_1 numeric(13,4) null comment 'decimal类型的trigger的第一个参数',
- dec_prop_2 numeric(13,4) null comment 'decimal类型的trigger的第二个参数',
- bool_prop_1 varchar(1) null comment 'Boolean类型的trigger的第一个参数',
- bool_prop_2 varchar(1) null comment 'Boolean类型的trigger的第二个参数',
- primary key (sched_name, trigger_name, trigger_group),
- foreign key (sched_name, trigger_name, trigger_group) references QRTZ_TRIGGERS(sched_name, trigger_name, trigger_group)
-) engine=innodb comment = '同步机制的行锁表';
-
-commit;
\ No newline at end of file
diff --git a/sql/update.sql b/sql/update.sql
index 832eee1..9435e7d 100644
--- a/sql/update.sql
+++ b/sql/update.sql
@@ -275,5 +275,241 @@ delete FROM sys_menu WHERE menu_name = "缓存列表"
-- “服务监控”菜单使用SpringBoot-Admin监控框架:
UPDATE `sys_menu` SET `path`='admin', `component`='monitor/admin/index', `perms`='monitor:admin:list' WHERE `menu_id`=112;
+-- update to V4.1.6:
+-- ----------------------------
+-- Table structure for pj_app_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_app_info`;
+CREATE TABLE `pj_app_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `app_name` varchar(255) NULL DEFAULT NULL,
+ `current_server` varchar(255) NULL DEFAULT NULL,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `password` varchar(255) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE INDEX `uidx01_app_info`(`app_name`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 2 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Records of pj_app_info
+-- ----------------------------
+INSERT INTO `pj_app_info` VALUES (1, 'ruoyi-worker', '127.0.0.1:10010', '2023-06-13 16:32:59.263000', '2023-07-04 17:25:49.798000', '123456');
+
+-- ----------------------------
+-- Table structure for pj_container_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_container_info`;
+CREATE TABLE `pj_container_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `app_id` bigint(0) NULL DEFAULT NULL,
+ `container_name` varchar(255) NULL DEFAULT NULL,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `last_deploy_time` datetime(6) NULL DEFAULT NULL,
+ `source_info` varchar(255) NULL DEFAULT NULL,
+ `source_type` int(0) NULL DEFAULT NULL,
+ `status` int(0) NULL DEFAULT NULL,
+ `version` varchar(255) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ INDEX `idx01_container_info`(`app_id`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for pj_instance_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_instance_info`;
+CREATE TABLE `pj_instance_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `actual_trigger_time` bigint(0) NULL DEFAULT NULL,
+ `app_id` bigint(0) NULL DEFAULT NULL,
+ `expected_trigger_time` bigint(0) NULL DEFAULT NULL,
+ `finished_time` bigint(0) NULL DEFAULT NULL,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `instance_id` bigint(0) NULL DEFAULT NULL,
+ `instance_params` longtext NULL,
+ `job_id` bigint(0) NULL DEFAULT NULL,
+ `job_params` longtext NULL,
+ `last_report_time` bigint(0) NULL DEFAULT NULL,
+ `result` longtext NULL,
+ `running_times` bigint(0) NULL DEFAULT NULL,
+ `status` int(0) NULL DEFAULT NULL,
+ `task_tracker_address` varchar(255) NULL DEFAULT NULL,
+ `type` int(0) NULL DEFAULT NULL,
+ `wf_instance_id` bigint(0) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ INDEX `idx01_instance_info`(`job_id`, `status`) USING BTREE,
+ INDEX `idx02_instance_info`(`app_id`, `status`) USING BTREE,
+ INDEX `idx03_instance_info`(`instance_id`, `status`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 4 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for pj_job_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_job_info`;
+CREATE TABLE `pj_job_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `alarm_config` varchar(255) NULL DEFAULT NULL,
+ `app_id` bigint(0) NULL DEFAULT NULL,
+ `concurrency` int(0) NULL DEFAULT NULL,
+ `designated_workers` varchar(255) NULL DEFAULT NULL,
+ `dispatch_strategy` int(0) NULL DEFAULT NULL,
+ `execute_type` int(0) NULL DEFAULT NULL,
+ `extra` varchar(255) NULL DEFAULT NULL,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `instance_retry_num` int(0) NULL DEFAULT NULL,
+ `instance_time_limit` bigint(0) NULL DEFAULT NULL,
+ `job_description` varchar(255) NULL DEFAULT NULL,
+ `job_name` varchar(255) NULL DEFAULT NULL,
+ `job_params` longtext NULL,
+ `lifecycle` varchar(255) NULL DEFAULT NULL,
+ `log_config` varchar(255) NULL DEFAULT NULL,
+ `max_instance_num` int(0) NULL DEFAULT NULL,
+ `max_worker_count` int(0) NULL DEFAULT NULL,
+ `min_cpu_cores` double NOT NULL,
+ `min_disk_space` double NOT NULL,
+ `min_memory_space` double NOT NULL,
+ `next_trigger_time` bigint(0) NULL DEFAULT NULL,
+ `notify_user_ids` varchar(255) NULL DEFAULT NULL,
+ `processor_info` varchar(255) NULL DEFAULT NULL,
+ `processor_type` int(0) NULL DEFAULT NULL,
+ `status` int(0) NULL DEFAULT NULL,
+ `tag` varchar(255) NULL DEFAULT NULL,
+ `task_retry_num` int(0) NULL DEFAULT NULL,
+ `time_expression` varchar(255) NULL DEFAULT NULL,
+ `time_expression_type` int(0) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ INDEX `idx01_job_info`(`app_id`, `status`, `time_expression_type`, `next_trigger_time`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 5 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Records of pj_job_info
+-- ----------------------------
+INSERT INTO `pj_job_info` VALUES (1, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 2, 1, NULL, '2023-06-02 15:01:27.717000', '2023-07-04 17:22:12.374000', 1, 0, '', '单机处理器执行测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'com.ruoyi.job.processors.StandaloneProcessorDemo', 1, 2, NULL, 1, '30000', 3);
+INSERT INTO `pj_job_info` VALUES (2, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 1, 2, NULL, '2023-06-02 15:04:45.342000', '2023-07-04 17:22:12.816000', 0, 0, NULL, '广播处理器测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'com.ruoyi.job.processors.BroadcastProcessorDemo', 1, 2, NULL, 1, '30000', 3);
+INSERT INTO `pj_job_info` VALUES (3, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 1, 4, NULL, '2023-06-02 15:13:23.519000', '2023-06-02 16:03:22.421000', 0, 0, NULL, 'Map处理器测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'com.ruoyi.job.processors.MapProcessorDemo', 1, 2, NULL, 1, '1000', 3);
+INSERT INTO `pj_job_info` VALUES (4, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 1, 3, NULL, '2023-06-02 15:45:25.896000', '2023-06-02 16:03:23.125000', 0, 0, NULL, 'MapReduce处理器测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'com.ruoyi.job.processors.MapReduceProcessorDemo', 1, 2, NULL, 1, '1000', 3);
+
+-- ----------------------------
+-- Table structure for pj_oms_lock
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_oms_lock`;
+CREATE TABLE `pj_oms_lock` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `lock_name` varchar(255) NULL DEFAULT NULL,
+ `max_lock_time` bigint(0) NULL DEFAULT NULL,
+ `ownerip` varchar(255) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE INDEX `uidx01_oms_lock`(`lock_name`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 6 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for pj_server_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_server_info`;
+CREATE TABLE `pj_server_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `ip` varchar(255) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE INDEX `uidx01_server_info`(`ip`) USING BTREE,
+ INDEX `idx01_server_info`(`gmt_modified`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 2 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for pj_user_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_user_info`;
+CREATE TABLE `pj_user_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `email` varchar(255) NULL DEFAULT NULL,
+ `extra` varchar(255) NULL DEFAULT NULL,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `password` varchar(255) NULL DEFAULT NULL,
+ `phone` varchar(255) NULL DEFAULT NULL,
+ `username` varchar(255) NULL DEFAULT NULL,
+ `web_hook` varchar(255) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ INDEX `uidx01_user_info`(`username`) USING BTREE,
+ INDEX `uidx02_user_info`(`email`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for pj_workflow_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_workflow_info`;
+CREATE TABLE `pj_workflow_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `app_id` bigint(0) NULL DEFAULT NULL,
+ `extra` varchar(255) NULL DEFAULT NULL,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `lifecycle` varchar(255) NULL DEFAULT NULL,
+ `max_wf_instance_num` int(0) NULL DEFAULT NULL,
+ `next_trigger_time` bigint(0) NULL DEFAULT NULL,
+ `notify_user_ids` varchar(255) NULL DEFAULT NULL,
+ `pedag` longtext NULL,
+ `status` int(0) NULL DEFAULT NULL,
+ `time_expression` varchar(255) NULL DEFAULT NULL,
+ `time_expression_type` int(0) NULL DEFAULT NULL,
+ `wf_description` varchar(255) NULL DEFAULT NULL,
+ `wf_name` varchar(255) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ INDEX `idx01_workflow_info`(`app_id`, `status`, `time_expression_type`, `next_trigger_time`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for pj_workflow_instance_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_workflow_instance_info`;
+CREATE TABLE `pj_workflow_instance_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `actual_trigger_time` bigint(0) NULL DEFAULT NULL,
+ `app_id` bigint(0) NULL DEFAULT NULL,
+ `dag` longtext NULL,
+ `expected_trigger_time` bigint(0) NULL DEFAULT NULL,
+ `finished_time` bigint(0) NULL DEFAULT NULL,
+ `gmt_create` datetime(6) NULL DEFAULT NULL,
+ `gmt_modified` datetime(6) NULL DEFAULT NULL,
+ `parent_wf_instance_id` bigint(0) NULL DEFAULT NULL,
+ `result` longtext NULL,
+ `status` int(0) NULL DEFAULT NULL,
+ `wf_context` longtext NULL,
+ `wf_init_params` longtext NULL,
+ `wf_instance_id` bigint(0) NULL DEFAULT NULL,
+ `workflow_id` bigint(0) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE INDEX `uidx01_wf_instance`(`wf_instance_id`) USING BTREE,
+ INDEX `idx01_wf_instance`(`workflow_id`, `status`, `app_id`, `expected_trigger_time`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for pj_workflow_node_info
+-- ----------------------------
+DROP TABLE IF EXISTS `pj_workflow_node_info`;
+CREATE TABLE `pj_workflow_node_info` (
+ `id` bigint(0) NOT NULL AUTO_INCREMENT,
+ `app_id` bigint(0) NOT NULL,
+ `enable` bit(1) NOT NULL,
+ `extra` longtext NULL,
+ `gmt_create` datetime(6) NULL,
+ `gmt_modified` datetime(6) NULL,
+ `job_id` bigint(0) NULL DEFAULT NULL,
+ `node_name` varchar(255) NULL DEFAULT NULL,
+ `node_params` longtext NULL,
+ `skip_when_failed` bit(1) NOT NULL,
+ `type` int(0) NULL DEFAULT NULL,
+ `workflow_id` bigint(0) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ INDEX `idx01_workflow_node_info`(`workflow_id`, `gmt_create`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 1 ROW_FORMAT = Dynamic;
+
+