update optimize !pr359 complete powerjob integration

This commit is contained in:
疯狂的狮子Li 2023-06-17 00:24:08 +08:00
parent effd504d48
commit bda0e0ec64
23 changed files with 235 additions and 266 deletions

View File

@@ -1,10 +1,10 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="ruoyi-xxl-job-admin" type="docker-deploy" factoryName="dockerfile" server-name="Docker">
<configuration default="false" name="ruoyi-powerjob-server" type="docker-deploy" factoryName="dockerfile" server-name="Docker">
<deployment type="dockerfile">
<settings>
<option name="imageTag" value="ruoyi/ruoyi-xxl-job-admin:5.0.0" />
<option name="imageTag" value="ruoyi/ruoyi-powerjob-server:5.0.0" />
<option name="buildOnly" value="true" />
<option name="sourceFilePath" value="ruoyi-extend/ruoyi-xxl-job-admin/Dockerfile" />
<option name="sourceFilePath" value="ruoyi-extend/ruoyi-powerjob-server/Dockerfile" />
</settings>
</deployment>
<method v="2" />

View File

@@ -11,15 +11,16 @@ spring.boot.admin.client:
--- # powerjob configuration
powerjob:
worker:
enabled: true
# the application must be registered on the powerjob login page before it can be used
# see the documentation tutorial for how to enable the scheduling center
enabled: false
# register the application on the powerjob login page first before it can be used
app-name: ruoyi-worker
enable-test-mode: false
max-appended-wf-context-length: 4096
max-result-length: 4096
port: 27777
protocol: http
server-address: 127.0.0.1:7700,127.0.0.1:7701
server-address: 127.0.0.1:7700
store-strategy: disk
--- # datasource configuration

View File

@@ -14,15 +14,16 @@ spring.boot.admin.client:
--- # powerjob configuration
powerjob:
worker:
enabled: true
# the application must be registered on the powerjob login page before it can be used
# see the documentation tutorial for how to enable the scheduling center
enabled: false
# register the application on the powerjob login page first before it can be used
app-name: ruoyi-worker
enable-test-mode: false
max-appended-wf-context-length: 4096
max-result-length: 4096
port: 27777
protocol: http
server-address: 127.0.0.1:7700,127.0.0.1:7701
server-address: 127.0.0.1:7700
store-strategy: disk
--- # datasource configuration

View File

@@ -0,0 +1,13 @@
FROM findepi/graalvm:java17-native
MAINTAINER Lion Li
RUN mkdir -p /ruoyi/powerjob/logs
WORKDIR /ruoyi/powerjob
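# 7700 is the PowerJob server HTTP port (server.port in application.properties)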
EXPOSE 7700
ADD ./target/ruoyi-powerjob-server.jar ./app.jar
ENTRYPOINT ["java", "-Djava.security.egd=file:/dev/./urandom", "-jar", "app.jar"]

View File

@@ -1,18 +1,16 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.dromara</groupId>
<artifactId>ruoyi-extend</artifactId>
<version>${revision}</version>
</parent>
<groupId>org.dromara</groupId>
<artifactId>ruoyi-powerjob-server</artifactId>
<version>${revision}</version>
<modelVersion>4.0.0</modelVersion>
<packaging>jar</packaging>
<artifactId>ruoyi-powerjob-server</artifactId>
<properties>
<spring-boot.version>2.7.4</spring-boot.version>
<spring-boot.version>2.7.12</spring-boot.version>
</properties>
<dependencyManagement>
<dependencies>

View File

@@ -0,0 +1,25 @@
package org.dromara.powerjob;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
import tech.powerjob.server.common.utils.PropertyUtils;
/**
* powerjob server startup program
*
* @author yhan219
*/
@Slf4j
@EnableScheduling
@SpringBootApplication(scanBasePackages = "tech.powerjob.server")
public class PowerJobServerApplication {
public static void main(String[] args) {
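// initialize the PowerJob server properties before Spring starts (the removed launcher made the same call in its pre() method)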
PropertyUtils.init();
SpringApplication.run(tech.powerjob.server.PowerJobServerApplication.class, args);
log.info("Documentation: https://www.yuque.com/powerjob/guidence/problem");
}
}

View File

@@ -1,45 +0,0 @@
package org.dromara.powerjob.server;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
import tech.powerjob.server.PowerJobServerApplication;
import tech.powerjob.server.common.utils.PropertyUtils;
/**
* Admin monitor startup program
*
* @author yhan219
*/
@Slf4j
@EnableScheduling
@SpringBootApplication(scanBasePackages = "tech.powerjob.server")
public class RuoyiPowerJobServerApplication {
private static final String TIPS = "\n\n" +
"******************* PowerJob Tips *******************\n" +
"如果应用无法启动,我们建议您仔细阅读以下文档来解决:\n" +
"if server can't startup, we recommend that you read the documentation to find a solution:\n" +
"https://www.yuque.com/powerjob/guidence/problem\n" +
"******************* PowerJob Tips *******************\n\n";
public static void main(String[] args) {
pre();
// Start SpringBoot application.
try {
SpringApplication.run(PowerJobServerApplication.class, args);
} catch (Throwable t) {
log.error(TIPS);
throw t;
}
}
private static void pre() {
log.info(TIPS);
PropertyUtils.init();
}
}

View File

@@ -1,5 +1,4 @@
oms.env=DAILY
logging.config=classpath:logback-dev.xml
oms.env=dev
####### Database properties(Configure according to the environment) #######
spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver

View File

@@ -1,42 +0,0 @@
oms.env=PRE
logging.config=classpath:logback-product.xml
####### Database properties(Configure according to the environment) #######
spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-pre?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
spring.datasource.core.username=root
spring.datasource.core.password=No1Bug2Please3!
spring.datasource.core.maximum-pool-size=20
spring.datasource.core.minimum-idle=5
####### MongoDB properties(Non-core configuration properties) #######
####### delete mongodb config to disable mongodb #######
oms.mongodb.enable=true
spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-pre
####### Email properties(Non-core configuration properties) #######
####### Delete the following code to disable the mail #######
spring.mail.host=smtp.qq.com
spring.mail.username=zqq
spring.mail.password=qqz
spring.mail.properties.mail.smtp.auth=true
spring.mail.properties.mail.smtp.starttls.enable=true
spring.mail.properties.mail.smtp.starttls.required=true
####### DingTalk properties(Non-core configuration properties) #######
####### Delete the following code to disable the DingTalk #######
oms.alarm.ding.app-key=dingauqwkvxxnqskknfv
oms.alarm.ding.app-secret=XWrEPdAZMPgJeFtHuL0LH73LRj-74umF2_0BFcoXMfvnX0pCQvt0rpb1JOJU_HLl
oms.alarm.ding.agent-id=847044348
####### Resource cleaning properties #######
oms.instanceinfo.retention=3
oms.container.retention.local=3
oms.container.retention.remote=-1
####### Cache properties #######
oms.instance.metadata.cache.size=1024
####### Threshold in precise fetching server(0~100). 100 means full detection of server, in which #######
####### split-brain could be avoided while performance overhead would increase. #######
oms.accurate.select.server.percentage = 50

View File

@@ -1,33 +1,32 @@
oms.env=PRODUCT
logging.config=classpath:logback-product.xml
oms.env=prod
####### Database properties(Configure according to the environment) #######
spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.core.jdbc-url=jdbc:mysql://localhost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
spring.datasource.core.jdbc-url=jdbc:mysql://localhost:3306/ry-vue?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
spring.datasource.core.username=root
spring.datasource.core.password=No1Bug2Please3!
spring.datasource.core.password=root
spring.datasource.core.maximum-pool-size=20
spring.datasource.core.minimum-idle=5
####### MongoDB properties(Non-core configuration properties) #######
####### delete mongodb config to disable mongodb #######
oms.mongodb.enable=true
spring.data.mongodb.uri=mongodb://localhost:27017/powerjob-product
#oms.mongodb.enable=true
#spring.data.mongodb.uri=mongodb+srv://zqq:No1Bug2Please3!@cluster0.wie54.gcp.mongodb.net/powerjob_daily?retryWrites=true&w=majority
####### Email properties(Non-core configuration properties) #######
####### Delete the following code to disable the mail #######
spring.mail.host=smtp.qq.com
spring.mail.username=zqq
spring.mail.password=qqz
spring.mail.properties.mail.smtp.auth=true
spring.mail.properties.mail.smtp.starttls.enable=true
spring.mail.properties.mail.smtp.starttls.required=true
#spring.mail.host=smtp.163.com
#spring.mail.username=zqq@163.com
#spring.mail.password=GOFZPNARMVKCGONV
#spring.mail.properties.mail.smtp.auth=true
#spring.mail.properties.mail.smtp.starttls.enable=true
#spring.mail.properties.mail.smtp.starttls.required=true
####### DingTalk properties(Non-core configuration properties) #######
####### Delete the following code to disable the DingTalk #######
oms.alarm.ding.app-key=
oms.alarm.ding.app-secret=
oms.alarm.ding.agent-id=
#oms.alarm.ding.app-key=dingauqwkvxxnqskknfv
#oms.alarm.ding.app-secret=XWrEPdAZMPgJeFtHuL0LH73LRj-74umF2_0BFcoXMfvnX0pCQvt0rpb1JOJU_HLl
#oms.alarm.ding.agent-id=847044348
####### Resource cleaning properties #######
oms.instanceinfo.retention=7

View File

@@ -1,11 +1,12 @@
# Http server port
server.port=7700
spring.profiles.active=daily
spring.profiles.active=@profiles.active@
spring.main.banner-mode=log
spring.jpa.open-in-view=false
spring.data.mongodb.repositories.type=none
logging.level.org.mongodb=warn
logging.config: classpath:logback-plus.xml
# Configuration for uploading files.
spring.servlet.multipart.enabled=true

View File

@@ -1,15 +1,11 @@
${AnsiColor.GREEN}
███████ ██ ██
░██░░░░██ ░██ ░██
░██ ░██ ██████ ███ ██ █████ ██████ ░██ ██████ ░██
░███████ ██░░░░██░░██ █ ░██ ██░░░██░░██░░█ ░██ ██░░░░██░██████
░██░░░░ ░██ ░██ ░██ ███░██░███████ ░██ ░ ░██░██ ░██░██░░░██
░██ ░██ ░██ ░████░████░██░░░░ ░██ ██ ░██░██ ░██░██ ░██
░██ ░░██████ ███░ ░░░██░░██████░███ ░░█████ ░░██████ ░██████
░░ ░░░░░░ ░░░ ░░░ ░░░░░░ ░░░ ░░░░░ ░░░░░░ ░░░░░
${AnsiColor.BRIGHT_RED}
* Maintainer: tengjiqi@gmail.com & Team PowerJob
* OfficialWebsite: http://www.powerjob.tech/
* SourceCode: https://github.com/PowerJob/PowerJob
* PoweredBy: SpringBoot${spring-boot.formatted-version}
${AnsiColor.DEFAULT}
Application Version: ${revision}
Spring Boot Version: ${spring-boot.version}
_ _
(_) | |
_ __ _____ _____ _ __ _ ___ | |__ ______ ___ ___ _ ____ _____ _ __
| '_ \ / _ \ \ /\ / / _ \ '__| |/ _ \| '_ \______/ __|/ _ \ '__\ \ / / _ \ '__|
| |_) | (_) \ V V / __/ | | | (_) | |_) | \__ \ __/ | \ V / __/ |
| .__/ \___/ \_/\_/ \___|_| | |\___/|_.__/ |___/\___|_| \_/ \___|_|
| | _/ |
|_| |__/

View File

@@ -1,38 +0,0 @@
<?xml version="1.0"?>
<!-- Configuration for local environment, all logs would print in console. -->
<configuration>
<!-- Configure color for logs. -->
<!-- Classes for rendering color. -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
<conversionRule conversionWord="wex"
converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
<conversionRule conversionWord="wEx"
converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
<!-- Color log pattern. -->
<property name="CONSOLE_LOG_PATTERN"
value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{20}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<logger name="MONITOR_LOGGER_DB_OPERATION" level="OFF"/>
<!-- Configuration for console output. -->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
<charset>utf8</charset>
</encoder>
</appender>
<!-- Quit timing logs, which seems disordered. -->
<logger name="tech.powerjob.server.service.timing" level="WARN" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<logger name="tech.powerjob" level="DEBUG" additivity="false">
<appender-ref ref="CONSOLE"/>
</logger>
<root level="INFO">
<appender-ref ref="CONSOLE"/>
</root>
</configuration>

View File

@@ -0,0 +1,132 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<property name="log.path" value="./logs"/>
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n"/>
<!-- include other logback configs -->
<include resource="logback-config/powerjob_monitor.xml"/>
<!-- console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<!-- console output, also written to sys-console.log -->
<appender name="file_console" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/sys-console.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- log file name pattern -->
<fileNamePattern>${log.path}/sys-console.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- keep logs for at most 1 day -->
<maxHistory>1</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- filter threshold level -->
<level>INFO</level>
</filter>
</appender>
<!-- system log output -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/sys-info.log</file>
<!-- rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- log file name pattern -->
<fileNamePattern>${log.path}/sys-info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- keep at most 60 days of log history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- filter level -->
<level>INFO</level>
<!-- on match: accept (log the event) -->
<onMatch>ACCEPT</onMatch>
<!-- on mismatch: deny (drop the event) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/sys-error.log</file>
<!-- rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- log file name pattern -->
<fileNamePattern>${log.path}/sys-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- keep at most 60 days of log history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- filter level -->
<level>ERROR</level>
<!-- on match: accept (log the event) -->
<onMatch>ACCEPT</onMatch>
<!-- on mismatch: deny (drop the event) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- async output for INFO logs -->
<appender name="async_info" class="ch.qos.logback.classic.AsyncAppender">
<!-- do not drop logs; by default TRACE/DEBUG/INFO events are discarded once the queue is 80% full -->
<discardingThreshold>0</discardingThreshold>
<!-- change the default queue depth (affects performance); the default is 256 -->
<queueSize>512</queueSize>
<!-- attach the target appender; at most one may be added -->
<appender-ref ref="file_info"/>
</appender>
<!-- async output for ERROR logs -->
<appender name="async_error" class="ch.qos.logback.classic.AsyncAppender">
<!-- do not drop logs; by default TRACE/DEBUG/INFO events are discarded once the queue is 80% full -->
<discardingThreshold>0</discardingThreshold>
<!-- change the default queue depth (affects performance); the default is 256 -->
<queueSize>512</queueSize>
<!-- attach the target appender; at most one may be added -->
<appender-ref ref="file_error"/>
</appender>
<!-- skywalking integration: print tid in console output -->
<!-- <appender name="console" class="ch.qos.logback.core.ConsoleAppender">-->
<!-- <encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">-->
<!-- <layout class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.TraceIdPatternLogbackLayout">-->
<!-- <pattern>[%tid] ${console.log.pattern}</pattern>-->
<!-- </layout>-->
<!-- <charset>utf-8</charset>-->
<!-- </encoder>-->
<!-- </appender>-->
<!-- skywalking integration: push collected logs -->
<!-- <appender name="sky_log" class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.log.GRPCLogClientAppender">-->
<!-- <encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">-->
<!-- <layout class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.TraceIdPatternLogbackLayout">-->
<!-- <pattern>[%tid] ${console.log.pattern}</pattern>-->
<!-- </layout>-->
<!-- <charset>utf-8</charset>-->
<!-- </encoder>-->
<!-- </appender>-->
<!-- system operation logs -->
<root level="info">
<appender-ref ref="console" />
<appender-ref ref="async_info" />
<appender-ref ref="async_error" />
<appender-ref ref="file_console" />
<!-- <appender-ref ref="sky_log"/>-->
</root>
</configuration>

View File

@@ -1,78 +0,0 @@
<?xml version="1.0"?>
<!-- Configuration for production environment. -->
<configuration>
<!-- Default configuration. -->
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<!-- Configuration for console. -->
<include resource="org/springframework/boot/logging/logback/console-appender.xml"/>
<!--
Log path, pay attention to permission, logs may be unable to generate.
Bug recording: Setting `~/logs`, is unable to create folder in user home directory,
a folder with the name ~ is created in project folder.
-->
<property name="LOG_PATH" value="${user.home}/powerjob/server/logs"/>
<!-- include other logback configs -->
<include resource="logback-config/powerjob_monitor.xml"/>
<!-- Configuration for ERROR logs. All error logs will write twice. -->
<appender name="ERROR_APPENDER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/powerjob-server-error.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<FileNamePattern>${LOG_PATH}/powerjob-server-error.%d{yyyy-MM-dd}.log</FileNamePattern>
<MaxHistory>7</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{20} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- End of configuration for ERROR logs. -->
<!-- Configuration for Web services. -->
<appender name="WEB_LOG_APPENDER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/powerjob-server-web.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<FileNamePattern>${LOG_PATH}/powerjob-server-web.%d{yyyy-MM-dd}.log</FileNamePattern>
<MaxHistory>7</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<append>true</append>
</appender>
<logger name="WEB_LOG" level="INFO" additivity="false">
<appender-ref ref="WEB_LOG_APPENDER"/>
</logger>
<!-- End of configuration for Web services. -->
<!-- Configuration for system logs. -->
<appender name="DEFAULT_APPENDER" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/powerjob-server-application.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<FileNamePattern>${LOG_PATH}/powerjob-server-application.%d{yyyy-MM-dd}.log</FileNamePattern>
<MaxHistory>7</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{20} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<append>true</append>
</appender>
<!-- End of configuration for system logs. -->
<root level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="DEFAULT_APPENDER"/>
<appender-ref ref="ERROR_APPENDER"/>
</root>
</configuration>

View File

@@ -20,7 +20,7 @@
<!-- common utilities -->
<dependency>
<groupId>org.dromara</groupId>
<artifactId>ruoyi-common-core</artifactId>
<artifactId>ruoyi-common-json</artifactId>
</dependency>
<dependency>
@@ -28,7 +28,6 @@
<artifactId>ruoyi-common-job</artifactId>
</dependency>
</dependencies>
</project>

View File

@@ -4,8 +4,8 @@ import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import org.dromara.common.json.utils.JsonUtils;
import org.springframework.stereotype.Component;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.MapProcessor;
@@ -37,7 +37,7 @@ public class MapProcessorDemo implements MapProcessor {
log.info("============== MapProcessorDemo#process ==============");
log.info("isRootTask:{}", isRootTask());
log.info("taskContext:{}", JsonUtils.toJSONString(context));
log.info("taskContext:{}", JsonUtils.toJsonString(context));
if (isRootTask()) {
log.info("==== MAP ====");

View File

@@ -1,15 +1,14 @@
package org.dromara.job.processors;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import cn.hutool.core.lang.Dict;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import org.dromara.common.json.utils.JsonUtils;
import org.springframework.stereotype.Component;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
@@ -37,10 +36,10 @@ public class MapReduceProcessorDemo implements MapReduceProcessor {
log.info("============== TestMapReduceProcessor#process ==============");
log.info("isRootTask:{}", isRootTask());
log.info("taskContext:{}", JsonUtils.toJSONString(context));
log.info("taskContext:{}", JsonUtils.toJsonString(context));
// read the MapReduce batch count and sub-task size from the console-supplied job params
final JSONObject jobParams = JSONObject.parseObject(context.getJobParams());
final Dict jobParams = JsonUtils.parseMap(context.getJobParams());
Integer batchSize = (Integer) jobParams.getOrDefault("batchSize", 100);
Integer batchNum = (Integer) jobParams.getOrDefault("batchNum", 10);
@@ -61,8 +60,8 @@ public class MapReduceProcessorDemo implements MapReduceProcessor {
return new ProcessResult(true, "MAP_SUCCESS");
} else {
log.info("==== NORMAL_PROCESS ====");
omsLogger.info("[DemoMRProcessor] process subTask: {}.", JSON.toJSONString(context.getSubTask()));
log.info("subTask: {}", JsonUtils.toJSONString(context.getSubTask()));
omsLogger.info("[DemoMRProcessor] process subTask: {}.", JsonUtils.toJsonString(context.getSubTask()));
log.info("subTask: {}", JsonUtils.toJsonString(context.getSubTask()));
Thread.sleep(1000);
if (context.getCurrentRetryTimes() == 0) {
return new ProcessResult(false, "FIRST_FAILED");
@@ -75,8 +74,8 @@ public class MapReduceProcessorDemo implements MapReduceProcessor {
@Override
public ProcessResult reduce(TaskContext context, List<TaskResult> taskResults) {
log.info("================ MapReduceProcessorDemo#reduce ================");
log.info("TaskContext: {}", JSONObject.toJSONString(context));
log.info("List<TaskResult>: {}", JSONObject.toJSONString(taskResults));
log.info("TaskContext: {}", JsonUtils.toJsonString(context));
log.info("List<TaskResult>: {}", JsonUtils.toJsonString(taskResults));
context.getOmsLogger().info("MapReduce job finished, result is {}.", taskResults);
boolean success = ThreadLocalRandom.current().nextBoolean();
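
Both demo processors above now serialize through the framework's org.dromara.common.json.utils.JsonUtils instead of fastjson and tech.powerjob.common.serialize.JsonUtils. A minimal sketch of the two calls used in these diffs, toJsonString and parseMap (the latter returning a hutool Dict), follows; the class name and parameter values are illustrative only and are not part of this commit:

import cn.hutool.core.lang.Dict;
import org.dromara.common.json.utils.JsonUtils;

public class JsonUtilsSketch {
    public static void main(String[] args) {
        // serialize an arbitrary object (a Dict standing in for console job params)
        String json = JsonUtils.toJsonString(Dict.create().set("batchSize", 200).set("batchNum", 5));
        // parse back into a Dict and read values with defaults, as MapReduceProcessorDemo does
        Dict jobParams = JsonUtils.parseMap(json);
        Integer batchSize = (Integer) jobParams.getOrDefault("batchSize", 100);
        Integer batchNum = (Integer) jobParams.getOrDefault("batchNum", 10);
        System.out.println(batchSize + " batches x " + batchNum);
    }
}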

View File

@@ -1,4 +1,4 @@
INSERT INTO `pj_app_info` (`id`, `app_name`, `current_server`, `gmt_create`, `gmt_modified`, `password`) VALUES (1, 'ruoyi-worker', '192.168.31.80:10010', '2023-06-13 16:32:59.263000', '2023-06-13 16:33:29.898000', 'ruoyi-worker');
INSERT INTO `pj_app_info` (`id`, `app_name`, `current_server`, `gmt_create`, `gmt_modified`, `password`) VALUES (1, 'ruoyi-worker', '192.168.31.80:10010', '2023-06-13 16:32:59.263000', '2023-06-13 16:33:29.898000', '123456');
INSERT INTO `pj_job_info` (`id`, `alarm_config`, `app_id`, `concurrency`, `designated_workers`, `dispatch_strategy`, `execute_type`, `extra`, `gmt_create`, `gmt_modified`, `instance_retry_num`, `instance_time_limit`, `job_description`, `job_name`, `job_params`, `lifecycle`, `log_config`, `max_instance_num`, `max_worker_count`, `min_cpu_cores`, `min_disk_space`, `min_memory_space`, `next_trigger_time`, `notify_user_ids`, `processor_info`, `processor_type`, `status`, `tag`, `task_retry_num`, `time_expression`, `time_expression_type`) VALUES (1, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 2, 1, NULL, '2023-06-02 15:01:27.717000', '2023-06-02 16:03:19.462000', 1, 0, '', '单机处理器执行测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'org.dromara.job.processors.StandaloneProcessorDemo', 1, 1, NULL, 1, '30000', 3);
INSERT INTO `pj_job_info` (`id`, `alarm_config`, `app_id`, `concurrency`, `designated_workers`, `dispatch_strategy`, `execute_type`, `extra`, `gmt_create`, `gmt_modified`, `instance_retry_num`, `instance_time_limit`, `job_description`, `job_name`, `job_params`, `lifecycle`, `log_config`, `max_instance_num`, `max_worker_count`, `min_cpu_cores`, `min_disk_space`, `min_memory_space`, `next_trigger_time`, `notify_user_ids`, `processor_info`, `processor_type`, `status`, `tag`, `task_retry_num`, `time_expression`, `time_expression_type`) VALUES (2, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 1, 2, NULL, '2023-06-02 15:04:45.342000', '2023-06-02 16:04:09.736000', 0, 0, NULL, '广播处理器测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'org.dromara.job.processors.BroadcastProcessorDemo', 1, 1, NULL, 1, '30000', 3);
INSERT INTO `pj_job_info` (`id`, `alarm_config`, `app_id`, `concurrency`, `designated_workers`, `dispatch_strategy`, `execute_type`, `extra`, `gmt_create`, `gmt_modified`, `instance_retry_num`, `instance_time_limit`, `job_description`, `job_name`, `job_params`, `lifecycle`, `log_config`, `max_instance_num`, `max_worker_count`, `min_cpu_cores`, `min_disk_space`, `min_memory_space`, `next_trigger_time`, `notify_user_ids`, `processor_info`, `processor_type`, `status`, `tag`, `task_retry_num`, `time_expression`, `time_expression_type`) VALUES (3, '{\"alertThreshold\":0,\"silenceWindowLen\":0,\"statisticWindowLen\":0}', 1, 5, '', 1, 4, NULL, '2023-06-02 15:13:23.519000', '2023-06-02 16:03:22.421000', 0, 0, NULL, 'Map处理器测试', NULL, '{}', '{\"type\":1}', 0, 0, 0, 0, 0, NULL, NULL, 'org.dromara.job.processors.MapProcessorDemo', 1, 2, NULL, 1, '1000', 3);
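
The seed data above wires three demo jobs to processors under org.dromara.job.processors by fully qualified class name. For orientation, a minimal single-machine processor sketch is shown below; it is a hypothetical class (not the StandaloneProcessorDemo referenced in the INSERT), assuming PowerJob's BasicProcessor worker SDK:

package org.dromara.job.processors;

import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.sdk.BasicProcessor;

/**
 * Hypothetical single-machine processor sketch. Registered as a Spring bean so
 * the worker can resolve it by the fully qualified class name configured in pj_job_info.
 */
@Slf4j
@Component
public class SampleStandaloneProcessor implements BasicProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        // job params are whatever was configured for the job in the PowerJob console
        log.info("jobParams: {}", context.getJobParams());
        // OmsLogger writes back to the console's online log view
        context.getOmsLogger().info("SampleStandaloneProcessor executed.");
        return new ProcessResult(true, "SUCCESS");
    }
}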

View File

@@ -1,3 +1,5 @@
ALTER TABLE gen_table ADD (data_name VARCHAR2(200) DEFAULT '');
COMMENT ON COLUMN gen_table.data_name IS '数据源名称';
UPDATE sys_menu SET path = 'powerjob', component = 'monitor/powerjob/index', perms = 'monitor:powerjob:list', remark = 'powerjob控制台菜单' WHERE menu_id = 120;

View File

@@ -1,3 +1,5 @@
ALTER TABLE gen_table ADD data_name varchar(200) default ''::varchar;
COMMENT ON COLUMN gen_table.data_name IS '数据源名称';
UPDATE sys_menu SET path = 'powerjob', component = 'monitor/powerjob/index', perms = 'monitor:powerjob:list', remark = 'powerjob控制台菜单' WHERE menu_id = 120;

View File

@@ -7,3 +7,6 @@ EXEC sp_addextendedproperty
'TABLE', N'gen_table',
'COLUMN', N'data_name'
GO
UPDATE sys_menu SET path = 'powerjob', component = 'monitor/powerjob/index', perms = 'monitor:powerjob:list', remark = 'powerjob控制台菜单' WHERE menu_id = 120
GO

View File

@@ -1 +1,3 @@
ALTER TABLE gen_table ADD COLUMN data_name varchar(200) NULL DEFAULT '' COMMENT '数据源名称' AFTER table_id;
UPDATE sys_menu SET path = 'powerjob', component = 'monitor/powerjob/index', perms = 'monitor:powerjob:list', remark = 'powerjob控制台菜单' WHERE menu_id = 120