Merge branch 'dev' of https://gitea.qinmian.online/cbx/srt_cloud

commit fa904f607d

pom.xml
@@ -22,6 +22,7 @@
    <module>srt-cloud-data-integrate</module>
    <module>srt-cloud-system</module>
    <module>srt-cloud-gateway</module>
    <module>srt-data-development</module>
  </modules>

  <properties>
@@ -95,5 +95,10 @@ public class DataAccessDto implements Serializable {
    @JsonFormat(pattern = DateUtils.DATE_TIME_PATTERN)
    private Date updateTime;

    @Schema(description = "判断是否是datax同步")
    private String isDatax;

}
@@ -64,4 +64,7 @@ public class DataAccessClientDto {
    private List<PatternMapper> tableNameMapper;
    @Schema(description = "字段名名映射")
    private List<PatternMapper> columnNameMapper;
    @Schema(description = "同步类型")
    private String isDatax;

}
@@ -115,5 +115,7 @@ public class DataAccessEntity extends BaseEntity {
     */
    private Date nextRunTime;

    private Integer isDatax;

}
@@ -7,8 +7,17 @@ import lombok.Getter;
@AllArgsConstructor
public enum IsJdbc {
    YES("是"),
    NO("否");

    NO("否")
    ;
    private String value;
    public static IsJdbc getByValue(String value) {
        for (IsJdbc isJdbc : IsJdbc.values()) {
            if (isJdbc.value.equals(value)) {
                return isJdbc;
            }
        }
        return null;
    }

}
@@ -40,12 +40,22 @@ import net.srt.vo.SqlGenerationVo;
import net.srt.vo.TableVo;
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.http.HttpHost;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaFuture;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.GetIndexRequest;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisConnectionException;
import srt.cloud.framework.dbswitch.common.type.ProductTypeEnum;
import srt.cloud.framework.dbswitch.common.util.StringUtil;
import srt.cloud.framework.dbswitch.core.model.ColumnDescription;

@@ -58,9 +68,8 @@ import javax.annotation.Resource;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;

/**

@@ -119,7 +128,6 @@ public class DataDatabaseServiceImpl extends BaseServiceImpl<DataDatabaseDao, Da
            testOnline(DataDatabaseConvert.INSTANCE.convert(entity));
        } catch (Exception ignored) {
        }

    }

    @Override

@@ -342,6 +350,93 @@

    private List<TableVo> getTables(DataDatabaseEntity dataDatabaseEntity) {
        ProductTypeEnum productTypeEnum = ProductTypeEnum.getByIndex(dataDatabaseEntity.getDatabaseType());
        switch (productTypeEnum) {
            case REDIS:
                //判断是不是redis数据库
                if (productTypeEnum == ProductTypeEnum.REDIS) {
                    try (Jedis jedis = new Jedis(dataDatabaseEntity.getDatabaseIp(), Integer.parseInt(dataDatabaseEntity.getDatabasePort()))) {
                        // 获取所有key
                        Set<String> keys = jedis.keys("*");
                        List<TableVo> tableVos = keys.stream()
                            .map(key -> {
                                TableVo tableVo = new TableVo();
                                tableVo.setTableName(key);
                                return tableVo;
                            })
                            .collect(Collectors.toList());
                        return tableVos;
                    } catch (JedisConnectionException e) {
                        // 处理连接异常
                        throw new RuntimeException("redis连接异常:" + e.getMessage());
                    }
                }
            case ES:
                // 从数据实体中获取 Elasticsearch 服务器的地址和端口
                String esServerAddress = dataDatabaseEntity.getDatabaseIp();
                int esServerPort = Integer.parseInt(dataDatabaseEntity.getDatabasePort());

                // 创建 RestHighLevelClient 实例,用于与 Elasticsearch 进行高级 REST 客户端通信
                RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(
                        new HttpHost(esServerAddress, esServerPort, "http")));
                try {
                    // 创建获取索引请求,使用通配符 "*" 表示获取所有索引
                    GetIndexRequest getIndexRequest = new GetIndexRequest("*");
                    // 发送获取索引请求,获取所有索引的名称
                    String[] indexes = client.indices().get(getIndexRequest, RequestOptions.DEFAULT).getIndices();
                    // 创建一个列表用于存储索引信息
                    List<TableVo> esTableVos = new ArrayList<>();
                    // 遍历所有索引名称,创建 TableVo 对象并添加到列表中
                    for (String index : indexes) {
                        TableVo tableVo = new TableVo();
                        tableVo.setTableName(index);
                        esTableVos.add(tableVo);
                    }
                    // 返回 Elasticsearch 索引信息列表
                    return esTableVos;
                } catch (IOException e) {
                    // 处理 IO 异常
                    e.printStackTrace();
                } finally {
                    try {
                        // 在 finally 块中确保关闭 RestHighLevelClient
                        client.close();
                    } catch (IOException e) {
                        // 处理关闭异常
                        e.printStackTrace();
                    }
                }
                // 如果出现异常或无法获取索引信息,则返回一个空列表
                return Collections.emptyList();
            case KAFKA:
                String bootstrapServers = dataDatabaseEntity.getDatabaseIp() + ":" + dataDatabaseEntity.getDatabasePort();
                Properties properties = new Properties();
                properties.put("bootstrap.servers", bootstrapServers);
                properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
                properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
                AdminClient adminClient = null;
                try {
                    adminClient = KafkaAdminClient.create(properties);
                    ListTopicsResult result = adminClient.listTopics();
                    KafkaFuture<Set<String>> names = result.names();
                    Set<String> topicSet = names.get();

                    List<TableVo> kafkaTableVos = topicSet.stream()
                        .map(TableVo::createKafkaTopicVo)
                        .collect(Collectors.toList());

                    return kafkaTableVos;
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                } finally {
                    if (adminClient != null) {
                        adminClient.close();
                    }
                }
                return Collections.emptyList();
            case MONGODB:
                break;
        }
        List<TableDescription> tableDescriptions = new ArrayList<>();
        try {
            switch (productTypeEnum) {
@@ -99,5 +99,10 @@ public class DataAccessVO implements Serializable {
    @JsonFormat(pattern = DateUtils.DATE_TIME_PATTERN)
    private Date updateTime;

    @Schema(description = "是否是datax任务")
    private String isDatax;

}
@@ -17,7 +17,7 @@ spring:
        # 命名空间,默认:public
        namespace: c5d32e76-b83c-4254-8176-1c6a2cee8e3b
        service: ${spring.application.name}
        group: srt2.0
        group: srt2.1
      config:
        server-addr: ${spring.cloud.nacos.discovery.server-addr}
        namespace: ${spring.cloud.nacos.discovery.namespace}
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -40,6 +40,7 @@ public class SecurityFilterConfig {
            .and().authorizeRequests()
            .antMatchers(permits).permitAll()
            .antMatchers(HttpMethod.OPTIONS).permitAll()
            .antMatchers("/datax/**").permitAll()
            .anyRequest().authenticated()
            .and().exceptionHandling().authenticationEntryPoint(new SecurityAuthenticationEntryPoint())
            .and().headers().frameOptions().disable()
@@ -78,19 +78,25 @@ spring:
            - Path=/v3/api-docs/**
          filters:
            - RewritePath=/v3/api-docs/(?<path>.*), /$\{path}/v3/api-docs
        - id: srt-cloud-datax-service # New Gateway
          uri: lb://srt-cloud-datax-service # Update with the correct URI for your new service
          predicates:
            - Path=/srt-cloud-datax-service/** # Adjust the path as needed
          filters:
            - StripPrefix=1

    nacos:
      discovery:
        server-addr: 101.34.77.101:8848
        # 命名空间,默认:public
        namespace: c5d32e76-b83c-4254-8176-1c6a2cee8e3b
        service: ${spring.application.name}
        group: srt2.0
        group: srt2.1

springdoc:
  swagger-ui:
    path: doc.html

logging:
  level:
    org:
@@ -11,6 +11,8 @@
  <modules>
    <module>srt-cloud-quartz</module>
    <module>srt-cloud-message</module>
    <module>srt-cloud-datax</module>
    <module>srt-cloud-datax-service</module>
  </modules>

@@ -0,0 +1,210 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>net.srt</groupId>
    <artifactId>srt-cloud-module</artifactId>
    <version>2.0.0</version>
  </parent>

  <artifactId>srt-cloud-datax-service</artifactId>

  <properties>
    <maven.compiler.source>8</maven.compiler.source>
    <maven.compiler.target>8</maven.compiler.target>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>

  <dependencies>
    <dependency>
      <groupId>net.srt</groupId>
      <artifactId>srt-cloud-mybatis</artifactId>
      <version>2.0.0</version>
    </dependency>
    <dependency>
      <groupId>net.srt</groupId>
      <artifactId>srt-cloud-api</artifactId>
      <version>2.0.0</version>
    </dependency>
    <!--使用log4j2-->
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-log4j2</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework.cloud</groupId>
      <artifactId>spring-cloud-starter-bootstrap</artifactId>
      <exclusions>
        <exclusion>
          <artifactId>spring-boot-starter-logging</artifactId>
          <groupId>org.springframework.boot</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>com.alibaba.cloud</groupId>
      <artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
    </dependency>
    <dependency>
      <groupId>com.alibaba.cloud</groupId>
      <artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
    </dependency>
    <!-- <dependency>-->
    <!--   <groupId>com.github.xiaoymin</groupId>-->
    <!--   <artifactId>knife4j-springdoc-ui</artifactId>-->
    <!-- </dependency>-->
    <dependency>
      <groupId>com.aliyun</groupId>
      <artifactId>dysmsapi20170525</artifactId>
    </dependency>
    <dependency>
      <groupId>com.squareup.okhttp3</groupId>
      <artifactId>okhttp</artifactId>
    </dependency>
    <dependency>
      <groupId>com.tencentcloudapi</groupId>
      <artifactId>tencentcloud-sdk-java</artifactId>
    </dependency>
    <dependency>
      <groupId>com.qiniu</groupId>
      <artifactId>qiniu-java-sdk</artifactId>
    </dependency>
  </dependencies>

  <build>
    <!--<finalName>${project.artifactId}</finalName>-->
    <plugins>
      <plugin>
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>appassembler-maven-plugin</artifactId>
        <version>2.1.0</version>
        <!-- 如果不配置 generate-daemons,则打包命令为 mvn clean package appassembler:assemble -->
        <!-- 如果配置了 generate-daemons,打包命令可以是 mvn clean package 也可以是 mvn clean package appassembler:assemble -->
        <executions>
          <execution>
            <id>generate-jsw-scripts</id>
            <phase>package</phase>
            <goals>
              <goal>generate-daemons</goal>
            </goals>
          </execution>
        </executions>

        <configuration>
          <!-- flat与lib共同决定将项目用的的所有jar包复制到lib目录下 -->
          <repositoryLayout>flat</repositoryLayout>
          <!--从哪里copy配置文件-->
          <configurationSourceDirectory>src/main/resources</configurationSourceDirectory>
          <includeConfigurationDirectoryInClasspath>true</includeConfigurationDirectoryInClasspath>
          <!--是否copy配置文件-->
          <copyConfigurationDirectory>true</copyConfigurationDirectory>
          <!--配置文件存放在conf目录路径-->
          <configurationDirectory>conf</configurationDirectory>
          <!-- 打包的jar,以及maven依赖的jar放到这个目录里面 -->
          <repositoryName>lib</repositoryName>
          <!-- 可执行脚本的目录 -->
          <binFolder>bin</binFolder>
          <encoding>UTF-8</encoding>
          <logsDirectory>logs</logsDirectory>

          <daemons>
            <daemon>
              <id>${project.artifactId}</id>
              <mainClass>net.srt.QuartzApplication</mainClass>
              <platforms>
                <platform>jsw</platform>
              </platforms>
              <generatorConfigurations>
                <generatorConfiguration>
                  <generator>jsw</generator>
                  <includes>
                    <include>linux-x86-32</include>
                    <include>linux-x86-64</include>
                    <include>windows-x86-32</include>
                    <include>windows-x86-64</include>
                  </includes>
                  <configuration>
                    <property>
                      <name>configuration.directory.in.classpath.first</name>
                      <value>conf</value>
                    </property>
                    <property>
                      <name>wrapper.ping.timeout</name>
                      <value>120</value>
                    </property>
                    <property>
                      <name>set.default.REPO_DIR</name>
                      <value>lib</value>
                    </property>
                    <property>
                      <name>wrapper.logfile</name>
                      <value>logs/wrapper.log</value>
                    </property>
                  </configuration>
                </generatorConfiguration>
              </generatorConfigurations>
              <jvmSettings>
                <!-- jvm参数 -->
                <!--<systemProperties>
                  <systemProperty>com.sun.management.jmxremote</systemProperty>
                  <systemProperty>com.sun.management.jmxremote.port=1984</systemProperty>
                  <systemProperty>com.sun.management.jmxremote.authenticate=false</systemProperty>
                  <systemProperty>com.sun.management.jmxremote.ssl=false</systemProperty>
                </systemProperties>-->
                <extraArguments>
                  <extraArgument>-server</extraArgument>
                  <extraArgument>-Dfile.encoding=utf-8</extraArgument>
                  <extraArgument>-Xms128m</extraArgument>
                  <extraArgument>-Xmx2048m</extraArgument>
                  <extraArgument>-XX:+PrintGCDetails</extraArgument><!--输出GC的详细日志-->
                  <extraArgument>-XX:+PrintGCDateStamps</extraArgument><!--输出GC的时间戳-->
                  <extraArgument>-Xloggc:logs/gc.log</extraArgument><!--日志文件的输出路径-->
                </extraArguments>
              </jvmSettings>
            </daemon>
          </daemons>
          <programs>
            <program>
              <mainClass>net.srt.QuartzApplication</mainClass>
              <id>${project.artifactId}</id>
            </program>
          </programs>
        </configuration>
      </plugin>

      <!--打包 日常调试打包可以把该组件注释掉,不然install的速度比较慢-->
      <plugin>
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <descriptors>
            <descriptor>${project.parent.parent.basedir}/assembly/assembly-win.xml</descriptor>
            <descriptor>${project.parent.parent.basedir}/assembly/assembly-linux.xml</descriptor>
          </descriptors>
        </configuration>
        <executions>
          <execution>
            <id>make-assembly</id>
            <phase>package</phase>
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <!--<plugin>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-maven-plugin</artifactId>
      </plugin>-->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <configuration>
          <skipTests>true</skipTests>
        </configuration>
      </plugin>
    </plugins>
  </build>

</project>
@@ -0,0 +1,20 @@
package net.srt;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.context.annotation.ComponentScan;

/**
 * @ClassName StuApp
 * @Description 描述
 * @Author 栗永斌
 */
@EnableDiscoveryClient
@SpringBootApplication
public class DataxServiceApp {
    public static void main(String[] args) {
        SpringApplication.run(DataxServiceApp.class);
        System.out.println("Hello world!");
    }
}
@@ -0,0 +1,65 @@
package net.srt.datax.controllor;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.AllArgsConstructor;
import net.srt.datax.server.SrtDataxService;
import net.srt.datax.vo.StrDatax;
import net.srt.framework.common.utils.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import java.util.List;

/**
 * @ClassName StuController
 * @Description 描述
 * @Author 栗永斌
 */
@RestController
@RequestMapping("/dataxService")
@Tag(name = "datax库表")
public class SrtDataxController {
    @Autowired
    private SrtDataxService dataxService;

    @Operation(summary = "查询列表")
    @PostMapping("/list")
    public Result<List<StrDatax>> list() {
        List<StrDatax> stuList = dataxService.dataxList();
        return Result.ok(stuList);
    }

    @Operation(summary = "新增")
    @PostMapping("/add")
    public Result<StrDatax> add(@RequestBody StrDatax strDatax) {
        dataxService.add(strDatax);
        return Result.ok();
    }

    @Operation(summary = "修改列表")
    @PostMapping("/update")
    public Result<StrDatax> update(@RequestBody StrDatax strDatax) {
        dataxService.updateStrDatax(strDatax);
        return Result.ok();
    }

    @Operation(summary = "删除列表")
    @PostMapping("/del/{id}")
    public Result<String> del(@PathVariable Long id) {
        dataxService.delStrDatax(id);
        return Result.ok();
    }

    @Operation(summary = "回显")
    @PostMapping("/findById/{id}")
    public Result<StrDatax> findById(@PathVariable Integer id) {
        if (id != null) {
            return Result.ok(dataxService.findById(id));
        }
        return Result.ok();
    }

}
@@ -0,0 +1,10 @@
package net.srt.datax.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import net.srt.datax.vo.StrDatax;
import org.apache.ibatis.annotations.Mapper;

@Mapper
public interface SrtDataxMapper extends BaseMapper<StrDatax> {

}
@@ -0,0 +1,26 @@
package net.srt.datax.server;

import com.baomidou.mybatisplus.extension.service.IService;
import net.srt.datax.vo.StrDatax;

import java.util.List;

/**
 * @ClassName StrDataxService
 * @Description 描述
 * @Author 栗永斌
 */
public interface SrtDataxService extends IService<StrDatax> {

    void add(StrDatax StrDatax);

    List<StrDatax> dataxList();

    void delStrDatax(Long id);

    void updateStrDatax(StrDatax StrDatax);

    StrDatax findById(Integer id);
}
@@ -0,0 +1,74 @@
package net.srt.datax.server.impl;

import cn.hutool.http.server.HttpServerRequest;
import lombok.AllArgsConstructor;
import net.srt.datax.mapper.SrtDataxMapper;
import net.srt.datax.server.SrtDataxService;
import net.srt.datax.vo.StrDatax;
import net.srt.framework.mybatis.service.impl.BaseServiceImpl;
import net.srt.framework.security.cache.TokenStoreCache;
import net.srt.framework.security.user.UserDetail;
import net.srt.framework.security.utils.TokenUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.List;

/**
 * @ClassName StrDataxServiceImpl
 * @Description 描述
 * @Author 栗永斌
 */
@Service
@AllArgsConstructor
public class StrDataxServiceImpl extends BaseServiceImpl<SrtDataxMapper, StrDatax> implements SrtDataxService {

    private SrtDataxMapper dataxMapper;

    private final TokenStoreCache tokenStoreCache;

    private HttpServletRequest request;

    @Override
    public void add(StrDatax strDatax) {
        UserDetail user = getUser();
        strDatax.setCreateUser(user.getUsername());
        strDatax.setCreateTime(new Date());
        dataxMapper.insert(strDatax);
    }

    @Override
    public List<StrDatax> dataxList() {
        List<StrDatax> StrDataxs = dataxMapper.selectList(null);
        return StrDataxs;
    }

    @Override
    public void delStrDatax(Long id) {
        dataxMapper.deleteById(id);
    }

    @Override
    public void updateStrDatax(StrDatax strDatax) {
        UserDetail user = getUser();
        strDatax.setUpdateUser(user.getUsername());
        strDatax.setUpdateTime(new Date());
        dataxMapper.updateById(strDatax);
    }

    @Override
    public StrDatax findById(Integer id) {
        return dataxMapper.selectById(id);
    }

    private UserDetail getUser() {
        String accessToken = TokenUtils.getAccessToken(request);
        UserDetail user = tokenStoreCache.getUser(accessToken);
        return user;
    }

}
@@ -0,0 +1,99 @@
package net.srt.datax.test;

import lombok.extern.log4j.Log4j2;

import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

@Log4j2
public class OptimizedDataGeneration {

    public static void main(String[] args) {
        String jdbcUrl = "jdbc:mysql://122.51.52.153:3306/jpz01?rewriteBatchedStatements=true&useServerPrepStmts=false";
        String username = "root";
        String password = "root";

        long startTime = System.currentTimeMillis();

        try (Connection connection = DriverManager.getConnection(jdbcUrl, username, password)) {
            connection.setAutoCommit(false); // Disable auto-commit

            // Generate and insert data
            int numberOfRecords = 30000000;
            int batchSize = 60000; // Adjust the batch size as needed
            int numberOfThreads = 9; // Adjust the number of threads as needed

            // Disable indexes and constraints before inserting data
            connection.createStatement().execute("ALTER TABLE t_user DISABLE KEYS");

            // Insert data using the optimized method
            insertData(connection, numberOfRecords, batchSize, numberOfThreads);

            // Enable indexes and constraints after inserting data
            connection.createStatement().execute("ALTER TABLE t_user ENABLE KEYS");

            connection.commit(); // Commit changes

        } catch (SQLException | InterruptedException | ExecutionException e) {
            e.printStackTrace();
        }

        long totalTime = System.currentTimeMillis() - startTime;
        log.info("Total execution time: {} ms", totalTime);
    }

    private static void insertData(Connection connection, int numberOfRecords, int batchSize, int numberOfThreads)
            throws SQLException, InterruptedException, ExecutionException {
        // Example of using ExecutorService for parallel insertion
        ExecutorService executorService = Executors.newFixedThreadPool(numberOfThreads);
        List<Future<Void>> futures = new ArrayList<>();

        for (int i = 0; i < numberOfThreads; i++) {
            final int startIndex = i * (numberOfRecords / numberOfThreads);
            final int endIndex = (i + 1) * (numberOfRecords / numberOfThreads);

            Future<Void> future = executorService.submit(() -> {
                insertDataBatch(connection, endIndex - startIndex, batchSize, startIndex + 1);
                return null;
            });

            futures.add(future);
        }

        // Wait for all threads to finish
        for (Future<Void> future : futures) {
            future.get();
        }

        executorService.shutdown();
    }

    private static void insertDataBatch(Connection connection, int numberOfRecords, int batchSize, int startId)
            throws SQLException {
        String insertQuery = "INSERT INTO t_user (user_id,user_name,user_pwd,user_sex) VALUES (0,?,?,?)";
        try (PreparedStatement preparedStatement = connection.prepareStatement(insertQuery)) {
            long startTime = System.currentTimeMillis();
            for (int i = 0; i < numberOfRecords; i++) {
                // Set values for each column in the prepared statement
                preparedStatement.setString(1, "a");
                preparedStatement.setString(2, "哈");
                preparedStatement.setInt(3, 1);

                // Add the batch for execution
                preparedStatement.addBatch();

                // Execute the batch every 'batchSize' records
                if ((i + 1) % batchSize == 0 || i == numberOfRecords - 1) {
                    preparedStatement.executeBatch();
                    preparedStatement.clearBatch();
                }
            }
            log.info("Insert {} records in {} ms", numberOfRecords, System.currentTimeMillis() - startTime);
        }
    }
}
@@ -0,0 +1,64 @@
package net.srt.datax.vo;

import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;

import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import java.util.Date;

/**
 * @ClassName StuDAO
 * @Description 描述
 * @Author 栗永斌
 */
@Data
@Schema(description = "字典数据")
@TableName("data_datax")
public class StrDatax {

    @Schema(description = "id", required = true)
    @NotNull(message = "id不能为空")
    @TableId(value = "id")
    private Long id;

    @Schema(description = "库名", required = true)
    @NotNull(message = "库名不能为空")
    private String name;

    @Schema(description = "ip", required = true)
    @NotNull(message = "ip不能为空")
    private String ip;

    @Schema(description = "端口", required = true)
    @NotNull(message = "端口不能为空")
    private String port;

    @Schema(description = "状态")
    private String status;

    @Schema(description = "创建时间", required = true)
    @NotBlank(message = "创建时间不能为空")
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date createTime;

    @Schema(description = "创建人", required = true)
    @NotBlank(message = "创建人不能为空")
    private String createUser;

    @Schema(description = "修改时间", required = true)
    @NotBlank(message = "修改时间不能为空")
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date updateTime;

    @Schema(description = "修改人", required = true)
    @NotBlank(message = "修改人不能为空")
    private String updateUser;

}
@@ -0,0 +1,4 @@
auth:
  ignore_urls:
    - /datax/**
    - /api/quartz/**
@@ -0,0 +1,34 @@
server:
  port: 8092

spring:
  application:
    name: srt-cloud-datax-service
  profiles:
    active: dev
  cloud:
    nacos:
      discovery:
        server-addr: 101.34.77.101:8848
        # 命名空间,默认:public
        namespace: c5d32e76-b83c-4254-8176-1c6a2cee8e3b
        service: ${spring.application.name}
        group: srt2.1
      config:
        server-addr: ${spring.cloud.nacos.discovery.server-addr}
        namespace: ${spring.cloud.nacos.discovery.namespace}
        file-extension: yaml
        # 指定配置
        extension-configs:
          - data-id: datasource.yaml
            refresh: true
# feign 配置
feign:
  client:
    config:
      default:
        connectTimeout: 1200000
        readTimeout: 1200000
        loggerLevel: basic
  okhttp:
    enabled: true
@@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--status用于设置log4j2框架内部的日志信息输出,设置成OFF将禁止log4j2内部日志输出,毕竟这个日志对我们没有什么作用,如果设置成trace,你会看到log4j2内部各种详细输出;monitorInterval是监控间隔,例如下面的设置是指:log4j2每隔600秒自动监控该配置文件是否有变化,如果有变化,则根据文件内容新的配置生成日志-->
<configuration status="OFF" monitorInterval="600">
  <Properties>
    <property name="LOG_PATH">./logs/</property>
    <property name="LOG_FILE">srt-cloud-quartz</property>
  </Properties>
  <!--定义添加器-->
  <appenders>
    <!--Console是输出控制台的标签,target可以控制往控制台输出日志的颜色,例如SYSTEM_OUT就是蓝色的,SYSTEM_ERR就是红色的-->
    <Console name="Console" target="SYSTEM_OUT">
      <!--控制台只输出level及以上级别的信息,onMatch为true代表符合level标准的才输出,onMismatch为true代表不符合level标准的就不输出-->
      <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="ACCEPT"/>
      <!--这个是输出日志的格式,如果对里面的参数不理解,可以去看我的这篇文章,网址是:“https://blog.csdn.net/qq_42449963/article/details/104617356”-->
      <!--<PatternLayout pattern=" %d{yyyy-MM-dd HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>-->
      <PatternLayout
        pattern=" %d{yyyy-MM-dd HH:mm:ss.SSS} %highlight{%6p} %style{%5pid}{bright,magenta} --- [%15.15t] %style{%c{20}}{bright,cyan}: %m%n"/>
    </Console>

    <!--这种存储文件的方式更加合理,可以设置多长时间把文件归档一次,也可以设置多大文件归档一次,如果都把所有的日志存在一个文件里面,文件会受不了的,解释一下参数信息:fileName后面如果后面不跟/,例如dev/logs/app.log,那就会把日志文件放在project工程下面,不是所属的项目下面如果后面跟/,例如/dev/logs/app.log,那就会把日志文件直接放在项目所在盘符的根目录下,例如项目在E盘存放,那就会把日志文件直接放在E盘的根目录下,如果后面直接加盘符,那就会存在特定的位置,例如F:/dev/logs/app.log,那就会直接放在F盘中特定的位置,上面都是经过测验的,fileName后面的app.log文件相当于是一个缓存文件,我们会把日志信息先放在app.log中,当达到我们设置的要求之后会把app.log中的日志信息转移到filePattern指定的日志文件中,转移的内容就会从app.log日志文件中清除,没有转移的内容还存放在app.log中,等到下一次符合要求的时候在进行一次转移-->
    <!--$${date:yyyy-MM}用在文件上面,输出的是目录的名字,例如2020-03,%d{MM-dd-yyyy}输入的就是月日年,例如03-02-2020,%i按照轮询输出,毕竟一天可能有符合要求的多个日志文件生成,所以需要在后面加一个类似于后缀的东西,当天的第一个日志文件可能是-1.log.gz,第二个文件就是-2.log.gz-->
    <RollingFile name="RollingFile" fileName="${LOG_PATH}/${LOG_FILE}.log"
                 filePattern="${LOG_PATH}/$${date:yyyy-MM}/${LOG_FILE}-%d{yyyy-MM-dd}-%i.log">
      <!--%thread:线程名;%-5level:级别从左显示5个字符宽度;%msg:在代码中需要输出的日志消息;%class{36}:估计显示的是完整类名-->
      <PatternLayout pattern=" %d{yyyy-MM-dd HH:mm:ss z} %-5level %class{36} %L %M - %msg%xEx%n"/>
      <!--<SizeBasedTriggeringPolicy size="300MB"/>-->
      <Policies>
        <!--TimeBasedTriggeringPolicy基于时间的触发策略,integer属性和上面<RollingFile>标签中的filePattern的值有关,例如:filePattern=”xxx%d{yyyy-MM-dd}xx” interval=”1” 表示将1天一个日志文件;filePattern=”xxx%d{yyyy-MM-dd-HH}xxx” interval=”1”表示一个小时一个日志文件,也就是说interval的单位取决于filePattern中的最小时间单位;modulate是(boolean)以0点钟为边界进行偏移计算,应该就是假设你中午启动项目,晚上0点也是一天了,而不是经过24小时才算一天-->
        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
        <!--当app.log文件大小到达100MB的时候,就归档一次日志文件,也就是把app.log中的那前面100MB文件取出来,放到上面<RollingFile >中的filePattern后面的路径中-->
        <SizeBasedTriggeringPolicy size="100MB"/>
      </Policies>
    </RollingFile>
  </appenders>

  <loggers>
    <logger name="net.srt.quartz.dao" level="DEBUG" additivity="false">
      <appender-ref ref="Console"/>
    </logger>
    <!--level="info"代表只能打印出info及其以上的信息;Console是上面Console标签的名字,往这一写,就可以往控制台上输出内容了,RollingFile是上面RollingFile标签的名字,往这一写,就会往设定的文件中输出内容了;当程序运行的时候就会被创建日志输出文件,不过里面没有任何日志内容,是否往里面输入日志,是通过下面的appender-ref标签控制的-->
    <root level="info">
      <appender-ref ref="Console"/>
      <!--一般不使用这个,只是让你知道有这个输出日志文件的方式而已-->
      <!--<appender-ref ref="File"/>-->
      <appender-ref ref="RollingFile"/>
    </root>
  </loggers>
</configuration>
@@ -0,0 +1,38 @@
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/

### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr

### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/

### VS Code ###
.vscode/

### Mac OS ###
.DS_Store
@@ -0,0 +1,262 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>net.srt</groupId>
    <artifactId>srt-cloud-module</artifactId>
    <version>2.0.0</version>
  </parent>

  <artifactId>srt-cloud-datax</artifactId>

  <properties>
    <maven.compiler.source>8</maven.compiler.source>
    <maven.compiler.target>8</maven.compiler.target>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>

  <dependencies>
    <dependency>
      <groupId>net.srt</groupId>
      <artifactId>srt-cloud-api</artifactId>
      <version>2.0.0</version>
    </dependency>
    <!--数据血缘-->
    <dependency>
      <groupId>net.srt</groupId>
      <artifactId>srt-cloud-data-lineage</artifactId>
      <version>2.0.0</version>
    </dependency>
    <!--使用log4j2-->
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-log4j2</artifactId>
    </dependency>
    <dependency>
      <groupId>net.srt</groupId>
      <artifactId>srt-cloud-dbswitch</artifactId>
      <version>2.0.0</version>
      <exclusions>
        <exclusion>
          <artifactId>spring-boot-starter-logging</artifactId>
          <groupId>org.springframework.boot</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>net.srt</groupId>
      <artifactId>srt-cloud-mybatis</artifactId>
      <version>2.0.0</version>
    </dependency>
    <dependency>
      <groupId>org.springframework.cloud</groupId>
      <artifactId>spring-cloud-starter-bootstrap</artifactId>
    </dependency>
    <dependency>
      <groupId>com.alibaba.cloud</groupId>
      <artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
    </dependency>
    <dependency>
      <groupId>com.alibaba.cloud</groupId>
      <artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
    </dependency>
    <dependency>
      <groupId>com.github.xiaoymin</groupId>
      <artifactId>knife4j-springdoc-ui</artifactId>
    </dependency>
    <dependency>
      <groupId>org.quartz-scheduler</groupId>
      <artifactId>quartz</artifactId>
    </dependency>
    <dependency>
      <groupId>net.srt</groupId>
      <artifactId>srt-cloud-data-integrate</artifactId>
      <version>2.0.0</version>
    </dependency>
    <!-- mvn install:install-file -Dfile=D:/Student_IDEA/srt-cloud/data-center/srt-cloud-module/srt-cloud-datax/src/main/java/net/srt/datax/lib/datax-core-0.0.1-SNAPSHOT.jar -DgroupId=com.alibaba.datax -DartifactId=datax-core -Dversion=0.0.1-SNAPSHOT -Dpackaging=jar
    -->
    <dependency>
      <groupId>com.alibaba.datax</groupId>
      <artifactId>datax-core</artifactId>
      <version>0.0.1-SNAPSHOT</version>
    </dependency>
    <!-- mvn install:install-file -Dfile=lib/datax-common-0.0.1-SNAPSHOT.jar -DgroupId=com.alibaba.datax -DartifactId=datax-common -Dversion=0.0.1-SNAPSHOT -Dpackaging=jar
    -->
    <dependency>
      <groupId>com.alibaba.datax</groupId>
      <artifactId>datax-common</artifactId>
      <version>0.0.1-SNAPSHOT</version>
    </dependency>

    <!-- mvn install:install-file -Dfile=lib/datax-transformer-0.0.1-SNAPSHOT.jar -DgroupId=com.alibaba.datax -DartifactId=datax-transformer -Dversion=0.0.1-SNAPSHOT -Dpackaging=jar
    -->
    <dependency>
      <groupId>com.alibaba.datax</groupId>
      <artifactId>datax-transformer</artifactId>
      <version>0.0.1-SNAPSHOT</version>
    </dependency>

    <!-- mvn install:install-file -Dfile=lib/datax-core-0.0.1-SNAPSHOT.jar -DgroupId=com.alibaba.datax -DartifactId=mysqlreader -Dversion=0.0.1-SNAPSHOT -Dpackaging=jar
    -->
    <dependency>
      <groupId>com.alibaba.datax</groupId>
      <artifactId>mysqlreader</artifactId>
      <version>0.0.1-SNAPSHOT</version>
    </dependency>

    <!-- mvn install:install-file -Dfile=lib/datax-core-0.0.1-SNAPSHOT.jar -DgroupId=com.alibaba.datax -DartifactId=mysqlwriter -Dversion=0.0.1-SNAPSHOT -Dpackaging=jar
    -->
    <!-- <dependency>-->
    <!--   <groupId>com.alibaba.datax</groupId>-->
    <!--   <artifactId>mysqlwriter</artifactId>-->
    <!--   <version>0.0.1-SNAPSHOT</version>-->
    <!-- </dependency>-->
    <!-- mvn install:install-file -Dfile=lib/datax-core-0.0.1-SNAPSHOT.jar -DgroupId=com.alibaba.datax -DartifactId=oraclereader -Dversion=0.0.1-SNAPSHOT -Dpackaging=jar
    -->

    <dependency>
      <groupId>com.alibaba.datax</groupId>
      <artifactId>oraclereader</artifactId>
      <version>0.0.1-SNAPSHOT</version>
    </dependency>
  </dependencies>

  <build>
    <!--<finalName>${project.artifactId}</finalName>-->
    <plugins>
      <plugin>
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>appassembler-maven-plugin</artifactId>
        <version>2.1.0</version>
        <!-- 如果不配置 generate-daemons,则打包命令为 mvn clean package appassembler:assemble -->
        <!-- 如果配置了 generate-daemons,打包命令可以是 mvn clean package 也可以是 mvn clean package appassembler:assemble -->
        <executions>
          <execution>
            <id>generate-jsw-scripts</id>
            <phase>package</phase>
            <goals>
              <goal>generate-daemons</goal>
            </goals>
          </execution>
        </executions>

        <configuration>
          <!-- flat与lib共同决定将项目用的的所有jar包复制到lib目录下 -->
          <repositoryLayout>flat</repositoryLayout>
          <!--从哪里copy配置文件-->
          <configurationSourceDirectory>src/main/resources</configurationSourceDirectory>
          <includeConfigurationDirectoryInClasspath>true</includeConfigurationDirectoryInClasspath>
          <!--是否copy配置文件-->
          <copyConfigurationDirectory>true</copyConfigurationDirectory>
          <!--配置文件存放在conf目录路径-->
          <configurationDirectory>conf</configurationDirectory>
          <!-- 打包的jar,以及maven依赖的jar放到这个目录里面 -->
          <repositoryName>lib</repositoryName>
          <!-- 可执行脚本的目录 -->
          <binFolder>bin</binFolder>
          <encoding>UTF-8</encoding>
          <logsDirectory>logs</logsDirectory>

          <daemons>
            <daemon>
              <id>${project.artifactId}</id>
              <mainClass>net.srt.QuartzApplication</mainClass>
              <platforms>
                <platform>jsw</platform>
              </platforms>
              <generatorConfigurations>
                <generatorConfiguration>
                  <generator>jsw</generator>
                  <includes>
                    <include>linux-x86-32</include>
                    <include>linux-x86-64</include>
                    <include>windows-x86-32</include>
                    <include>windows-x86-64</include>
                  </includes>
                  <configuration>
                    <property>
                      <name>configuration.directory.in.classpath.first</name>
                      <value>conf</value>
                    </property>
                    <property>
                      <name>wrapper.ping.timeout</name>
                      <value>120</value>
                    </property>
                    <property>
                      <name>set.default.REPO_DIR</name>
                      <value>lib</value>
                    </property>
                    <property>
                      <name>wrapper.logfile</name>
                      <value>logs/wrapper.log</value>
                    </property>
                  </configuration>
                </generatorConfiguration>
              </generatorConfigurations>
              <jvmSettings>
                <!-- jvm参数 -->
                <!--<systemProperties>
                  <systemProperty>com.sun.management.jmxremote</systemProperty>
                  <systemProperty>com.sun.management.jmxremote.port=1984</systemProperty>
                  <systemProperty>com.sun.management.jmxremote.authenticate=false</systemProperty>
                  <systemProperty>com.sun.management.jmxremote.ssl=false</systemProperty>
                </systemProperties>-->
                <extraArguments>
                  <extraArgument>-server</extraArgument>
                  <extraArgument>-Dfile.encoding=utf-8</extraArgument>
                  <extraArgument>-Xms128m</extraArgument>
                  <extraArgument>-Xmx2048m</extraArgument>
                  <extraArgument>-XX:+PrintGCDetails</extraArgument><!--输出GC的详细日志-->
                  <extraArgument>-XX:+PrintGCDateStamps</extraArgument><!--输出GC的时间戳-->
                  <extraArgument>-Xloggc:logs/gc.log</extraArgument><!--日志文件的输出路径-->
                </extraArguments>
              </jvmSettings>
            </daemon>
          </daemons>
          <programs>
            <program>
              <mainClass>net.srt.QuartzApplication</mainClass>
              <id>${project.artifactId}</id>
            </program>
          </programs>
        </configuration>
      </plugin>

      <!--打包 日常调试打包可以把该组件注释掉,不然install的速度比较慢-->
      <plugin>
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <descriptors>
            <descriptor>${project.parent.parent.basedir}/assembly/assembly-win.xml</descriptor>
            <descriptor>${project.parent.parent.basedir}/assembly/assembly-linux.xml</descriptor>
          </descriptors>
        </configuration>
        <executions>
          <execution>
            <id>make-assembly</id>
            <phase>package</phase>
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <!--<plugin>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-maven-plugin</artifactId>
      </plugin>-->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <configuration>
          <skipTests>true</skipTests>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
@@ -0,0 +1,23 @@
package net.srt.datax;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.ComponentScan;

/**
 * @ClassName StuApp
 * @Description 描述
 * @Author 栗永斌
 */
@EnableFeignClients
@EnableDiscoveryClient
@SpringBootApplication
@ComponentScan(basePackages = "net.srt.framework.common.cache")
public class DataxApp {
    public static void main(String[] args) {
        SpringApplication.run(DataxApp.class);
        System.out.println("Hello world!");
    }
}
@@ -0,0 +1,33 @@
package net.srt.datax.controllor;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.AllArgsConstructor;
import net.srt.api.module.data.integrate.dto.DataAccessDto;
import net.srt.datax.server.DataxService;
import net.srt.framework.common.utils.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

/**
 * @ClassName StuController
 * @Description 描述
 * @Author 栗永斌
 */
@RestController
@RequestMapping("/datax")
@Tag(name = "datax同步")
public class DataxController {
    @Autowired
    private DataxService dataxService;

    @Operation(summary = "执行")
    @PostMapping("/execute")
    public Result execute(@RequestBody DataAccessDto dataAccessDto) {
        dataxService.datax(dataAccessDto);
        return Result.ok();
    }

}
@@ -0,0 +1,20 @@
package net.srt.datax.feign;

import io.swagger.v3.oas.annotations.Operation;
import net.srt.api.module.data.integrate.dto.DataAccessDto;
import net.srt.framework.common.utils.Result;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;

/**
 * @author : WangZhanpeng
 * @date : 2023/12/15 19:26
 */
@FeignClient(name = "srt-cloud-data-integrate", fallback = DataAccessFeignImpl.class)
public interface DataAccessFeign {

    @PostMapping("/datax/execute")
    @Operation(summary = "定时器===>datax")
    public Result execute(@RequestBody DataAccessDto dataAccessDto);
}
@@ -0,0 +1,17 @@
package net.srt.datax.feign;

import net.srt.api.module.data.integrate.dto.DataAccessDto;
import net.srt.datax.feign.DataAccessFeign;
import net.srt.dto.DataAccessClientDto;
import net.srt.framework.common.utils.Result;

/**
 * @author : WangZhanpeng
 * @date : 2023/12/15 19:28
 */
public class DataAccessFeignImpl implements DataAccessFeign {
    @Override
    public Result execute(DataAccessDto dataAccessDto) {
        return Result.error("服务器繁忙,请稍等~~~~");
    }
}
@@ -0,0 +1,7 @@
package net.srt.datax.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;

public interface DataxMapper {

}
@@ -0,0 +1,13 @@
package net.srt.datax.server;

import net.srt.api.module.data.integrate.dto.DataAccessDto;

/**
 * @ClassName StrDataxService
 * @Description 描述
 * @Author 栗永斌
 */
public interface DataxService {

    void datax(DataAccessDto dataAccessDto);
}
@@ -0,0 +1,103 @@
package net.srt.datax.server.impl;

import com.alibaba.datax.core.Engine;
import lombok.AllArgsConstructor;
import net.srt.api.module.data.integrate.dto.DataAccessDto;
import net.srt.dao.DataDatabaseDao;
import net.srt.datax.mapper.DataxMapper;
import net.srt.datax.server.DataxService;
import net.srt.entity.DataDatabaseEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.time.LocalTime;

/**
 * @ClassName StrDataxServiceImpl
 * @Description 描述
 * @Author 栗永斌
 */
@Service
public class DataxServiceImpl implements DataxService {
    @Autowired
    private DataxMapper dataxMapper;

    @Autowired
    private DataDatabaseDao dataDatabaseDao;

    @Override
    public void datax(DataAccessDto dataAccessDto) {
        String jobJson = buildJobJSON(dataAccessDto);
        // 调用datax完成数据同步
        executeDataX(jobJson);
    }

    private void executeDataX(String job) {

        System.setProperty("now", LocalTime.now().toString());
        String[] dataxArgs = {"-job", job, "-mode", "standalone", "-jobid", "-1"};
        try {
            Engine.entry(dataxArgs);
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    private String buildJobJSON(DataAccessDto dataAccessData) {

        //获取源端数据库信息
        DataDatabaseEntity sourceDatabase = dataDatabaseDao.selectById(dataAccessData.getSourceDatabaseId());

        //获取目标端数据库信息
        DataDatabaseEntity targetDatabase = dataDatabaseDao.selectById(dataAccessData.getTargetDatabaseId());

        // 根据任务详情和数据库信息生成job.json
        String dataxJson = "{\n" +
            "  \"job\": {\n" +
            "    \"setting\": {\n" +
            "      \"speed\": {\n" +
            "        \"channel\": 5\n" +
            "      }\n" +
            "    },\n" +
            "    \"content\": [\n" +
            "      {\n" +
            "        \"reader\": {\n" +
            "          \"name\": \"mysqlreader\",\n" +
            "          \"parameter\": {\n" +
            "            \"username\": \"" + sourceDatabase.getUserName() + "\",\n" +
            "            \"password\": \"" + sourceDatabase.getPassword() + "\",\n" +
            "            \"column\": [\"*\"],\n" +
            "            \"splitPk\": \"id\",\n" +
            "            \"connection\": [\n" +
            "              {\n" +
            "                \"jdbcUrl\": [\"" + sourceDatabase.getJdbcUrl() + "\"],\n" +
            "                \"table\": [\"source_table\"]\n" +
            "              }\n" +
            "            ]\n" +
            "          }\n" +
            "        },\n" +
            "        \"writer\": {\n" +
            "          \"name\": \"mysqlwriter\",\n" +
            "          \"parameter\": {\n" +
            "            \"writeMode\": \"replace\",\n" +
            "            \"username\": \"" + targetDatabase.getUserName() + "\",\n" +
            "            \"password\": \"" + targetDatabase.getPassword() + "\",\n" +
            "            \"column\": [\"*\"],\n" +
            "            \"session\": [\n" +
            "              \"set session sql_mode='ANSI_QUOTES'\"\n" +
            "            ],\n" +
            "            \"connection\": [\n" +
            "              {\n" +
            "                \"jdbcUrl\": \"" + targetDatabase.getJdbcUrl() + "\",\n" +
            "                \"table\": [\"target_table\"]\n" +
            "              }\n" +
            "            ]\n" +
            "          }\n" +
            "        }\n" +
            "      }\n" +
            "    ]\n" +
            "  }\n" +
            "}";
        return dataxJson;
    }
}
@@ -0,0 +1,4 @@
auth:
  ignore_urls:
    - /datax/**
    - /api/quartz/**
@@ -0,0 +1,36 @@
server:
  port: 8091

spring:
  application:
    name: srt-cloud-datax
  profiles:
    active: dev
  cloud:
    nacos:
      discovery:
        server-addr: 101.34.77.101:8848
        # 命名空间,默认:public
        namespace: c5d32e76-b83c-4254-8176-1c6a2cee8e3b
        service: ${spring.application.name}
        group: srt2.1
      config:
        server-addr: ${spring.cloud.nacos.discovery.server-addr}
        namespace: ${spring.cloud.nacos.discovery.namespace}
        file-extension: yaml
        # 指定配置
        extension-configs:
          - data-id: datasource.yaml
            refresh: true
# feign 配置
feign:
  client:
    config:
      default:
        connectTimeout: 1200000
        readTimeout: 1200000
        loggerLevel: basic
  okhttp:
    enabled: true
datax:
  home:
@@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--status用于设置log4j2框架内部的日志信息输出,设置成OFF将禁止log4j2内部日志输出,毕竟这个日志对我们没有什么作用,如果设置成trace,你会看到log4j2内部各种详细输出;monitorInterval是监控间隔,例如下面的设置是指:log4j2每隔600秒自动监控该配置文件是否有变化,如果有变化,则根据文件内容新的配置生成日志-->
<configuration status="OFF" monitorInterval="600">
  <Properties>
    <property name="LOG_PATH">./logs/</property>
    <property name="LOG_FILE">srt-cloud-quartz</property>
  </Properties>
  <!--定义添加器-->
  <appenders>
    <!--Console是输出控制台的标签,target可以控制往控制台输出日志的颜色,例如SYSTEM_OUT就是蓝色的,SYSTEM_ERR就是红色的-->
    <Console name="Console" target="SYSTEM_OUT">
      <!--控制台只输出level及以上级别的信息,onMatch为true代表符合level标准的才输出,onMismatch为true代表不符合level标准的就不输出-->
      <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="ACCEPT"/>
      <!--这个是输出日志的格式,如果对里面的参数不理解,可以去看我的这篇文章,网址是:“https://blog.csdn.net/qq_42449963/article/details/104617356”-->
      <!--<PatternLayout pattern=" %d{yyyy-MM-dd HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>-->
      <PatternLayout
        pattern=" %d{yyyy-MM-dd HH:mm:ss.SSS} %highlight{%6p} %style{%5pid}{bright,magenta} --- [%15.15t] %style{%c{20}}{bright,cyan}: %m%n"/>
    </Console>

    <!--这种存储文件的方式更加合理,可以设置多长时间把文件归档一次,也可以设置多大文件归档一次,如果都把所有的日志存在一个文件里面,文件会受不了的,解释一下参数信息:fileName后面如果后面不跟/,例如dev/logs/app.log,那就会把日志文件放在project工程下面,不是所属的项目下面如果后面跟/,例如/dev/logs/app.log,那就会把日志文件直接放在项目所在盘符的根目录下,例如项目在E盘存放,那就会把日志文件直接放在E盘的根目录下,如果后面直接加盘符,那就会存在特定的位置,例如F:/dev/logs/app.log,那就会直接放在F盘中特定的位置,上面都是经过测验的,fileName后面的app.log文件相当于是一个缓存文件,我们会把日志信息先放在app.log中,当达到我们设置的要求之后会把app.log中的日志信息转移到filePattern指定的日志文件中,转移的内容就会从app.log日志文件中清除,没有转移的内容还存放在app.log中,等到下一次符合要求的时候在进行一次转移-->
    <!--$${date:yyyy-MM}用在文件上面,输出的是目录的名字,例如2020-03,%d{MM-dd-yyyy}输入的就是月日年,例如03-02-2020,%i按照轮询输出,毕竟一天可能有符合要求的多个日志文件生成,所以需要在后面加一个类似于后缀的东西,当天的第一个日志文件可能是-1.log.gz,第二个文件就是-2.log.gz-->
    <RollingFile name="RollingFile" fileName="${LOG_PATH}/${LOG_FILE}.log"
                 filePattern="${LOG_PATH}/$${date:yyyy-MM}/${LOG_FILE}-%d{yyyy-MM-dd}-%i.log">
      <!--%thread:线程名;%-5level:级别从左显示5个字符宽度;%msg:在代码中需要输出的日志消息;%class{36}:估计显示的是完整类名-->
      <PatternLayout pattern=" %d{yyyy-MM-dd HH:mm:ss z} %-5level %class{36} %L %M - %msg%xEx%n"/>
      <!--<SizeBasedTriggeringPolicy size="300MB"/>-->
      <Policies>
        <!--TimeBasedTriggeringPolicy基于时间的触发策略,integer属性和上面<RollingFile>标签中的filePattern的值有关,例如:filePattern=”xxx%d{yyyy-MM-dd}xx” interval=”1” 表示将1天一个日志文件;filePattern=”xxx%d{yyyy-MM-dd-HH}xxx” interval=”1”表示一个小时一个日志文件,也就是说interval的单位取决于filePattern中的最小时间单位;modulate是(boolean)以0点钟为边界进行偏移计算,应该就是假设你中午启动项目,晚上0点也是一天了,而不是经过24小时才算一天-->
        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
        <!--当app.log文件大小到达100MB的时候,就归档一次日志文件,也就是把app.log中的那前面100MB文件取出来,放到上面<RollingFile >中的filePattern后面的路径中-->
        <SizeBasedTriggeringPolicy size="100MB"/>
      </Policies>
    </RollingFile>
  </appenders>

  <loggers>
    <logger name="net.srt.quartz.dao" level="DEBUG" additivity="false">
      <appender-ref ref="Console"/>
    </logger>
    <!--level="info"代表只能打印出info及其以上的信息;Console是上面Console标签的名字,往这一写,就可以往控制台上输出内容了,RollingFile是上面RollingFile标签的名字,往这一写,就会往设定的文件中输出内容了;当程序运行的时候就会被创建日志输出文件,不过里面没有任何日志内容,是否往里面输入日志,是通过下面的appender-ref标签控制的-->
    <root level="info">
      <appender-ref ref="Console"/>
      <!--一般不使用这个,只是让你知道有这个输出日志文件的方式而已-->
      <!--<appender-ref ref="File"/>-->
      <appender-ref ref="RollingFile"/>
    </root>
  </loggers>
</configuration>
@@ -13,7 +13,7 @@ spring:
        # 命名空间,默认:public
        namespace: c5d32e76-b83c-4254-8176-1c6a2cee8e3b
        service: ${spring.application.name}
        group: srt2.0
        group: srt2.1
      config:
        server-addr: ${spring.cloud.nacos.discovery.server-addr}
        namespace: ${spring.cloud.nacos.discovery.namespace}
@@ -61,6 +61,11 @@
      <groupId>org.quartz-scheduler</groupId>
      <artifactId>quartz</artifactId>
    </dependency>
    <dependency>
      <groupId>net.srt</groupId>
      <artifactId>srt-cloud-datax</artifactId>
      <version>2.0.0</version>
    </dependency>
  </dependencies>

  <build>
@@ -4,6 +4,7 @@ import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.ComponentScan;

/**
 * 定时任务模块

@@ -13,6 +14,7 @@ import org.springframework.cloud.openfeign.EnableFeignClients;
@EnableFeignClients
@EnableDiscoveryClient
@SpringBootApplication
@ComponentScan(basePackages = "net.srt.framework.common.cache")
public class QuartzApplication {

    public static void main(String[] args) {
@ -8,6 +8,7 @@ import net.srt.api.module.data.integrate.dto.DataAccessDto;
import net.srt.api.module.data.integrate.dto.DataAccessTaskDto;
import net.srt.api.module.quartz.QuartzDataAccessApi;
import net.srt.api.module.quartz.constant.QuartzJobType;
import net.srt.datax.feign.DataAccessFeign;
import net.srt.framework.common.utils.Result;
import net.srt.quartz.entity.ScheduleJobEntity;
import net.srt.quartz.enums.JobGroupEnum;

@ -31,6 +32,7 @@ public class QuartzDataAccessApiImpl implements QuartzDataAccessApi {
	private final Scheduler scheduler;
	private final DataAccessApi dataAccessApi;
	private final ScheduleJobService jobService;
	private final DataAccessFeign feign;

	@Override
	public Result<String> releaseAccess(Long id) {

@ -63,7 +65,10 @@ public class QuartzDataAccessApiImpl implements QuartzDataAccessApi {

	private ScheduleJobEntity buildJobEntity(Long id) {
		DataAccessDto dataAccessDto = dataAccessApi.getById(id).getData();
		return ScheduleJobEntity.builder().typeId(id).projectId(dataAccessDto.getProjectId()).jobType(QuartzJobType.DATA_ACCESS.getValue()).jobName(String.format("[%s]%s", id.toString(), dataAccessDto.getTaskName())).concurrent(ScheduleConcurrentEnum.NO.getValue())
		if (dataAccessDto.getIsDatax().equals("datax")){
			feign.execute(dataAccessDto);
		}
		return ScheduleJobEntity.builder().isDatax(dataAccessDto.getIsDatax()).typeId(id).projectId(dataAccessDto.getProjectId()).jobType(QuartzJobType.DATA_ACCESS.getValue()).jobName(String.format("[%s]%s", id.toString(), dataAccessDto.getTaskName())).concurrent(ScheduleConcurrentEnum.NO.getValue())
			.beanName("dataAccessTask").method("run").jobGroup(JobGroupEnum.DATA_ACCESS.getValue()).saveLog(true).cronExpression(dataAccessDto.getCron()).status(ScheduleStatusEnum.NORMAL.getValue())
			.params(String.valueOf(id)).once(TaskType.ONE_TIME_FULL_SYNC.getCode().equals(dataAccessDto.getTaskType())).build();
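The hunk above delegates DataX-flavoured access tasks to the srt-cloud-datax service through an OpenFeign client before the quartz job entity is built. The real DataAccessFeign interface lives in the srt-cloud-datax module and is not part of this diff; the sketch below only illustrates the usual shape of such a client, and the service name, request path and return type are assumptions. Note also that dataAccessDto.getIsDatax().equals("datax") throws a NullPointerException when isDatax is null; the null-safe form is "datax".equals(dataAccessDto.getIsDatax()).

package net.srt.datax.feign;

import net.srt.api.module.data.integrate.dto.DataAccessDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;

// Illustrative sketch only: the committed interface is defined in srt-cloud-datax.
// The service name, path and return type below are assumptions, not taken from this diff.
@FeignClient(name = "srt-cloud-datax")
public interface DataAccessFeign {

	// Hands the access task over to the DataX side; invoked from buildJobEntity above
	// when dataAccessDto.getIsDatax() equals "datax".
	@PostMapping("/datax/execute")
	void execute(@RequestBody DataAccessDto dataAccessDto);
}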
@ -125,4 +125,6 @@ public class ScheduleJobEntity implements Serializable {

	private Boolean once;

	private String isDatax;

}
@ -1,11 +1,13 @@
package net.srt.quartz.utils;

import net.srt.datax.feign.DataAccessFeign;
import net.srt.framework.common.exception.ServerException;
import net.srt.quartz.entity.ScheduleJobEntity;
import net.srt.quartz.enums.ScheduleConcurrentEnum;
import net.srt.quartz.enums.ScheduleStatusEnum;
import org.quartz.*;

import javax.annotation.Resource;
import java.util.Date;

/**

@ -20,6 +22,9 @@ public class ScheduleUtils {
	 */
	public static final String JOB_PARAM_KEY = "JOB_PARAM_KEY";


	/**
	 * Get the quartz job class
	 */
@ -13,7 +13,7 @@ spring:
# Namespace (default: public)
namespace: c5d32e76-b83c-4254-8176-1c6a2cee8e3b
service: ${spring.application.name}
group: srt2.0
group: srt2.1
config:
server-addr: ${spring.cloud.nacos.discovery.server-addr}
namespace: ${spring.cloud.nacos.discovery.namespace}
@ -52,4 +52,5 @@ public class SysAttachmentController {

		return Result.ok();
	}

}
@ -16,7 +16,7 @@ spring:
# Namespace (default: public)
namespace: c5d32e76-b83c-4254-8176-1c6a2cee8e3b
service: ${spring.application.name}
group: srt2.0
group: srt2.1
config:
server-addr: ${spring.cloud.nacos.discovery.server-addr}
namespace: ${spring.cloud.nacos.discovery.namespace}
@ -0,0 +1,219 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<parent>
		<groupId>net.srt</groupId>
		<artifactId>srt-cloud</artifactId>
		<version>2.0.0</version>
	</parent>

	<artifactId>srt-data-development</artifactId>

	<properties>
		<maven.compiler.source>8</maven.compiler.source>
		<maven.compiler.target>8</maven.compiler.target>
		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
	</properties>

	<dependencies>
		<dependency>
			<groupId>net.srt</groupId>
			<artifactId>srt-cloud-api</artifactId>
			<version>2.0.0</version>
		</dependency>
		<!--Use log4j2-->
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-log4j2</artifactId>
		</dependency>
		<dependency>
			<groupId>net.srt</groupId>
			<artifactId>srt-cloud-mybatis</artifactId>
			<version>2.0.0</version>
		</dependency>
		<dependency>
			<groupId>org.springframework.cloud</groupId>
			<artifactId>spring-cloud-starter-bootstrap</artifactId>
			<exclusions>
				<exclusion>
					<artifactId>spring-boot-starter-logging</artifactId>
					<groupId>org.springframework.boot</groupId>
				</exclusion>
			</exclusions>
		</dependency>
		<dependency>
			<groupId>com.alibaba.cloud</groupId>
			<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
		</dependency>
		<dependency>
			<groupId>com.alibaba.cloud</groupId>
			<artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
		</dependency>
		<dependency>
			<groupId>com.github.whvcse</groupId>
			<artifactId>easy-captcha</artifactId>
		</dependency>
		<dependency>
			<groupId>com.github.xiaoymin</groupId>
			<artifactId>knife4j-springdoc-ui</artifactId>
		</dependency>
		<dependency>
			<groupId>com.aliyun.oss</groupId>
			<artifactId>aliyun-sdk-oss</artifactId>
		</dependency>
		<dependency>
			<groupId>com.qcloud</groupId>
			<artifactId>cos_api</artifactId>
		</dependency>
		<dependency>
			<groupId>com.qiniu</groupId>
			<artifactId>qiniu-java-sdk</artifactId>
		</dependency>
		<dependency>
			<groupId>com.huaweicloud</groupId>
			<artifactId>esdk-obs-java-bundle</artifactId>
		</dependency>
		<dependency>
			<groupId>io.minio</groupId>
			<artifactId>minio</artifactId>
		</dependency>
	</dependencies>

	<build>
		<!--<finalName>${project.artifactId}</finalName>-->
		<plugins>
			<plugin>
				<groupId>org.codehaus.mojo</groupId>
				<artifactId>appassembler-maven-plugin</artifactId>
				<version>2.1.0</version>
				<!-- Without generate-daemons configured, the packaging command is mvn clean package appassembler:assemble -->
				<!-- With generate-daemons configured, packaging works with either mvn clean package or mvn clean package appassembler:assemble -->
				<executions>
					<execution>
						<id>generate-jsw-scripts</id>
						<phase>package</phase>
						<goals>
							<goal>generate-daemons</goal>
						</goals>
					</execution>
				</executions>

				<configuration>
					<!-- flat, together with the lib repository below, copies every jar the project depends on into the lib directory -->
					<repositoryLayout>flat</repositoryLayout>
					<!--Where the configuration files are copied from-->
					<configurationSourceDirectory>src/main/resources</configurationSourceDirectory>
					<includeConfigurationDirectoryInClasspath>true</includeConfigurationDirectoryInClasspath>
					<!--Whether to copy the configuration files-->
					<copyConfigurationDirectory>true</copyConfigurationDirectory>
					<!--Configuration files are placed in the conf directory-->
					<configurationDirectory>conf</configurationDirectory>
					<!-- The packaged jar and the Maven dependency jars go into this directory -->
					<repositoryName>lib</repositoryName>
					<!-- Directory for the executable scripts -->
					<binFolder>bin</binFolder>
					<encoding>UTF-8</encoding>
					<logsDirectory>logs</logsDirectory>

					<daemons>
						<daemon>
							<id>${project.artifactId}</id>
							<mainClass>net.srt.SystemApplication</mainClass>
							<platforms>
								<platform>jsw</platform>
							</platforms>
							<generatorConfigurations>
								<generatorConfiguration>
									<generator>jsw</generator>
									<includes>
										<include>linux-x86-32</include>
										<include>linux-x86-64</include>
										<include>windows-x86-32</include>
										<include>windows-x86-64</include>
									</includes>
									<configuration>
										<property>
											<name>configuration.directory.in.classpath.first</name>
											<value>conf</value>
										</property>
										<property>
											<name>wrapper.ping.timeout</name>
											<value>120</value>
										</property>
										<property>
											<name>set.default.REPO_DIR</name>
											<value>lib</value>
										</property>
										<property>
											<name>wrapper.logfile</name>
											<value>logs/wrapper.log</value>
										</property>
									</configuration>
								</generatorConfiguration>
							</generatorConfigurations>
							<jvmSettings>
								<!-- JVM arguments -->
								<!--<systemProperties>
									<systemProperty>com.sun.management.jmxremote</systemProperty>
									<systemProperty>com.sun.management.jmxremote.port=1984</systemProperty>
									<systemProperty>com.sun.management.jmxremote.authenticate=false</systemProperty>
									<systemProperty>com.sun.management.jmxremote.ssl=false</systemProperty>
								</systemProperties>-->
								<extraArguments>
									<extraArgument>-server</extraArgument>
									<extraArgument>-Dfile.encoding=utf-8</extraArgument>
									<extraArgument>-Xms128m</extraArgument>
									<extraArgument>-Xmx1024m</extraArgument>
									<extraArgument>-XX:+PrintGCDetails</extraArgument><!--print detailed GC logs-->
									<extraArgument>-XX:+PrintGCDateStamps</extraArgument><!--print GC timestamps-->
									<extraArgument>-Xloggc:logs/gc.log</extraArgument><!--GC log output path-->
								</extraArguments>
							</jvmSettings>
						</daemon>
					</daemons>
					<programs>
						<program>
							<mainClass>net.srt.SystemApplication</mainClass>
							<id>${project.artifactId}</id>
						</program>
					</programs>
				</configuration>
			</plugin>

			<!--Assembly packaging: for day-to-day debugging this plugin can be commented out, otherwise install becomes noticeably slower-->
			<plugin>
				<artifactId>maven-assembly-plugin</artifactId>
				<configuration>
					<descriptors>
						<descriptor>${project.parent.basedir}/assembly/assembly-win.xml</descriptor>
						<descriptor>${project.parent.basedir}/assembly/assembly-linux.xml</descriptor>
					</descriptors>
				</configuration>
				<executions>
					<execution>
						<id>make-assembly</id>
						<phase>package</phase>
						<goals>
							<goal>single</goal>
						</goals>
					</execution>
				</executions>
			</plugin>

			<!--<plugin>
				<groupId>org.springframework.boot</groupId>
				<artifactId>spring-boot-maven-plugin</artifactId>
			</plugin>-->
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-surefire-plugin</artifactId>
				<configuration>
					<skipTests>true</skipTests>
				</configuration>
			</plugin>
		</plugins>
	</build>

</project>
@ -0,0 +1,11 @@
package net.srt;

import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;

@EnableFeignClients
@EnableDiscoveryClient
@SpringBootApplication
public class DevelopmentApp {
}
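As committed, DevelopmentApp carries the Spring Cloud annotations but has an empty body: there is no main method, and the appassembler configuration in the new pom.xml above still points <mainClass> at net.srt.SystemApplication rather than this class. A conventional bootstrap class for the srt-data-development module would look like the sketch below; this is an illustration, not part of the commit.

package net.srt;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;

// Sketch only: the committed DevelopmentApp has an empty class body.
@EnableFeignClients
@EnableDiscoveryClient
@SpringBootApplication
public class DevelopmentApp {

	public static void main(String[] args) {
		// Boots the srt-data-development service and registers it with Nacos discovery.
		SpringApplication.run(DevelopmentApp.class, args);
	}
}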