datax module

pull/3/head
chenbingxuan 2023-12-20 22:30:31 +08:00
parent d6ae9ef856
commit f5ded45af6
22 changed files with 645 additions and 2 deletions

View File

@@ -84,6 +84,12 @@ spring:
            - Path=/srt-cloud-datax-service/** # Adjust the path as needed
          filters:
            - StripPrefix=1
        - id: srt-data-development # New gateway route
          uri: lb://srt-data-development # Update with the correct URI for your new service
          predicates:
            - Path=/data-development/** # Adjust the path as needed
          filters:
            - StripPrefix=1
    nacos:
      discovery:
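With StripPrefix=1, a request such as GET /data-development/cluster/page is forwarded to the srt-data-development service as /cluster/page; the gateway strips the first path segment before proxying.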

View File

@@ -1,11 +1,17 @@
package net.srt;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
@EnableFeignClients
@EnableDiscoveryClient
@SpringBootApplication
@MapperScan({"net.srt.Fink.mapper", "net.srt.Hadoop.mapper"})
public class DevelopmentApp {
public static void main(String[] args) {
SpringApplication.run(DevelopmentApp.class, args);
System.out.println("Data development service started ===========>");
}
}
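A single @MapperScan listing both packages is used because stacking two @MapperScan annotations on one class compiles only against mybatis-spring versions where the annotation is repeatable; the merged form behaves identically everywhere.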

View File

@@ -0,0 +1,91 @@
package net.srt.Fink.controller;
import lombok.AllArgsConstructor;
import net.srt.Fink.service.FinkService;
import net.srt.Fink.vo.FinkVo;
import net.srt.Fink.vo.request.FinkRequest;
import net.srt.framework.common.page.PageResult;
import net.srt.framework.common.utils.Result;
import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@RequestMapping("/cluster")
@AllArgsConstructor
public class FinkController {
private final FinkService finkService;
/**
* Paged query of Flink cluster records.
*
* @param page  page number
* @param limit page size
* @param name  optional name filter
* @param alias optional alias filter
* @return one page of FinkVo records
*/
@GetMapping("/page")
public Result<PageResult<FinkVo>> finkPage(
@RequestParam Integer page,
@RequestParam Integer limit,
@RequestParam(value = "name", required = false) String name,
@RequestParam(value = "alias", required = false) String alias) {
FinkRequest finkRequest = new FinkRequest();
finkRequest.setAlias(alias);
finkRequest.setPage(page);
finkRequest.setLimit(limit);
finkRequest.setName(name);
PageResult<FinkVo> pageResult = finkService.finkPage(finkRequest);
return Result.ok(pageResult);
}
/**
* Create a Flink cluster record.
*
* @param finkVo cluster to create
* @return ok on success
*/
@PostMapping
public Result devAdd(@RequestBody FinkVo finkVo){
finkService.add(finkVo);
return Result.ok();
}
/**
* Update an existing Flink cluster record.
*
* @param finkVo cluster to update
* @return ok on success
*/
@PutMapping
public Result devUpd(@RequestBody FinkVo finkVo){
finkService.upd(finkVo);
return Result.ok();
}
/**
* Batch-delete Flink cluster records.
*
* @param finkVo clusters to delete
* @return ok on success
*/
@DeleteMapping
public Result devDel(@RequestBody List<FinkVo> finkVo){
finkService.del(finkVo);
return Result.ok();
}
/**
* Find a Flink cluster record by id.
*
* @param id primary key
* @return the matching record
*/
@GetMapping("{id}")
public Result<FinkVo> finkVoResult(@PathVariable Integer id){
return Result.ok(finkService.findFinkVo(id));
}
}
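Through the gateway route added above, these endpoints are reachable as GET /data-development/cluster/page, POST/PUT/DELETE /data-development/cluster, and GET /data-development/cluster/{id}.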

View File

@@ -0,0 +1,9 @@
package net.srt.Fink.convert;
import net.srt.Fink.vo.FinkVo;
import java.util.List;
public interface FinkConvert {
List<FinkVo> devList(List<FinkVo> list);
}

View File

@@ -0,0 +1,21 @@
package net.srt.Fink.convert;
import net.srt.Fink.vo.FinkVo;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
@Service
public class FinkConvertImp implements FinkConvert {
@Override
public List<FinkVo> devList(List<FinkVo> list) {
if (list == null) {
return null;
}
// Defensive copy; an element-by-element mapping step would go here if the VO ever diverges from the entity.
return new ArrayList<>(list);
}
}

View File

@@ -0,0 +1,9 @@
package net.srt.Fink.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import net.srt.Fink.vo.FinkVo;
public interface FinkMapper extends BaseMapper<FinkVo> {
}

View File

@@ -0,0 +1,22 @@
package net.srt.Fink.service;
import com.baomidou.mybatisplus.extension.service.IService;
import net.srt.Fink.vo.FinkVo;
import net.srt.Fink.vo.request.FinkRequest;
import net.srt.framework.common.page.PageResult;
import java.util.List;
public interface FinkService extends IService<FinkVo> {
PageResult<FinkVo> finkPage(FinkRequest finkRequest);
void add(FinkVo finkVo);
void upd(FinkVo finkVo);
void del(List<FinkVo> finkVo);
FinkVo findFinkVo(Integer id);
}

View File

@@ -0,0 +1,76 @@
package net.srt.Fink.service.impl;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import net.srt.Fink.convert.FinkConvert;
import net.srt.Fink.mapper.FinkMapper;
import net.srt.Fink.service.FinkService;
import net.srt.Fink.vo.FinkVo;
import net.srt.Fink.vo.request.FinkRequest;
import net.srt.framework.common.page.PageResult;
import net.srt.framework.mybatis.service.impl.BaseServiceImpl;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.List;
@Service
@AllArgsConstructor
@Slf4j
public class FinkServiceImpl extends BaseServiceImpl<FinkMapper, FinkVo> implements FinkService {
private final FinkMapper finkMapper;
private final FinkConvert finkConvert;
@Override
public PageResult<FinkVo> finkPage(FinkRequest finkRequest) {
IPage<FinkVo> page = finkMapper.selectPage(getPage(finkRequest), getWrapper(finkRequest));
return new PageResult<>(finkConvert.devList(page.getRecords()), page.getTotal());
}
private LambdaQueryWrapper<FinkVo> getWrapper(FinkRequest query){
LambdaQueryWrapper<FinkVo> wrapper = Wrappers.lambdaQuery();
wrapper.like(StrUtil.isNotBlank(query.getName()), FinkVo::getName, query.getName());
wrapper.like(StrUtil.isNotBlank(query.getAlias()), FinkVo::getAlias, query.getAlias());
dataScopeWithoutOrgId(wrapper);
return wrapper;
}
@Override
public void add(FinkVo finkVo) {
Date date = new Date();
finkVo.setCreateTime(date);
finkMapper.insert(finkVo);
}
@Override
public void upd(FinkVo finkVo) {
// Stamp the update time, not the create time, on modification.
finkVo.setUpdateTime(new Date());
finkMapper.updateById(finkVo);
}
@Override
public void del(List<FinkVo> finkVo) {
for (FinkVo vo : finkVo) {
finkMapper.deleteById(vo.getId());
}
}
@Override
public FinkVo findFinkVo(Integer id) {
return finkMapper.selectById(id);
}
}
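Setting createTime/updateTime by hand in each service works, but MyBatis-Plus can also fill audit fields centrally. A minimal sketch, assuming the VO date fields are annotated with @TableField(fill = FieldFill.INSERT) and @TableField(fill = FieldFill.INSERT_UPDATE); the class name and package below are hypothetical, not part of this commit:

package net.srt.Fink.config; // hypothetical package

import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler;
import org.apache.ibatis.reflection.MetaObject;
import org.springframework.stereotype.Component;
import java.util.Date;

@Component
public class AuditMetaObjectHandler implements MetaObjectHandler {
    @Override
    public void insertFill(MetaObject metaObject) {
        // Runs on insert: fills createTime for fields marked FieldFill.INSERT.
        this.strictInsertFill(metaObject, "createTime", Date.class, new Date());
    }
    @Override
    public void updateFill(MetaObject metaObject) {
        // Runs on update: fills updateTime for fields marked FieldFill.INSERT_UPDATE.
        this.strictUpdateFill(metaObject, "updateTime", Date.class, new Date());
    }
}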

View File

@@ -0,0 +1,39 @@
package net.srt.Fink.vo;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
@Data
@TableName("cluster_info")
public class FinkVo {
@TableId("id")
private Integer id;
private Integer projectId;
private String name;
private String alias;
private Integer type;
private String hosts;
private String jobManagerHost;
private String flinkVersion;
private Integer status;
private String note;
private Byte autoRegisters;
private Integer clusterConfigurationId;
private Integer taskId;
private boolean enabled;
private Integer version;
private Integer deleted;
private Integer creator;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date createTime;
private Integer updater;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date updateTime;
}

View File

@@ -0,0 +1,13 @@
package net.srt.Fink.vo.request;
import lombok.Data;
@Data
public class AddFink {
private String name;
private String alias;
private Integer type;
private String hosts;
private boolean enabled;
private String note;
}

View File

@@ -0,0 +1,10 @@
package net.srt.Fink.vo.request;
import lombok.Data;
import net.srt.framework.common.query.Query;
@Data
public class FinkRequest extends Query {
private String name;
private String alias;
}

View File

@@ -0,0 +1,74 @@
package net.srt.Hadoop.controller;
import lombok.AllArgsConstructor;
import net.srt.Hadoop.service.HadoopService;
import net.srt.Hadoop.vo.HadoopVo;
import net.srt.Hadoop.vo.request.HadoopRequest;
import net.srt.framework.common.page.PageResult;
import net.srt.framework.common.utils.Result;
import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@AllArgsConstructor
@RequestMapping("/cluster-configuration")
public class HadoopController {
private final HadoopService hadoopService;
/**
* Paged query of Hadoop cluster configurations.
*
* @param page  page number
* @param limit page size
* @param name  optional name filter
* @param alias optional alias filter
* @return one page of HadoopVo records
*/
@GetMapping("/page")
public Result<PageResult<HadoopVo>> hadoopPage(
@RequestParam Integer page,
@RequestParam Integer limit,
@RequestParam(value = "name", required = false) String name,
@RequestParam(value = "alias", required = false) String alias) {
HadoopRequest hadoopRequest = new HadoopRequest();
hadoopRequest.setPage(page);
hadoopRequest.setLimit(limit);
hadoopRequest.setName(name);
hadoopRequest.setAlias(alias);
PageResult<HadoopVo> pageResult = hadoopService.hadoopPage(hadoopRequest);
return Result.ok(pageResult);
}
/**
* Create a Hadoop cluster configuration.
*
* @param hadoopVo configuration to create
* @return ok on success
*/
@PostMapping
public Result hadoopAdd(@RequestBody HadoopVo hadoopVo){
hadoopService.add(hadoopVo);
return Result.ok();
}
/**
* Update an existing Hadoop cluster configuration.
*
* @param hadoopVo configuration to update
* @return ok on success
*/
@PutMapping
public Result hadoopUpd(@RequestBody HadoopVo hadoopVo){
hadoopService.upd(hadoopVo);
return Result.ok();
}
/**
* Batch-delete Hadoop cluster configurations.
*
* @param hadoopVo configurations to delete
* @return ok on success
*/
@DeleteMapping
public Result hadoopDel(@RequestBody List<HadoopVo> hadoopVo){
hadoopService.del(hadoopVo);
return Result.ok();
}
/**
* Find a Hadoop cluster configuration by id.
*
* @param id primary key
* @return the matching record
*/
@GetMapping("{id}")
public Result<HadoopVo> hadoopVoResult(@PathVariable Integer id){
return Result.ok(hadoopService.findHadoopVo(id));
}
}
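HadoopController mirrors FinkController endpoint-for-endpoint; through the gateway the same five operations are available under /data-development/cluster-configuration.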

View File

@@ -0,0 +1,10 @@
package net.srt.Hadoop.convert;
import net.srt.Hadoop.vo.HadoopVo;
import java.util.List;
public interface HadoopConvert {
List<HadoopVo> hadoopList(List<HadoopVo> list);
}

View File

@@ -0,0 +1,23 @@
package net.srt.Hadoop.convert;
import net.srt.Hadoop.vo.HadoopVo;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
@Service
public class HadoopConvertImp implements HadoopConvert {
@Override
public List<HadoopVo> hadoopList(List<HadoopVo> list) {
if (list == null) {
return null;
}
// Defensive copy; mirrors FinkConvertImp.
return new ArrayList<>(list);
}
}

View File

@@ -0,0 +1,7 @@
package net.srt.Hadoop.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import net.srt.Hadoop.vo.HadoopVo;
public interface HadoopMapper extends BaseMapper<HadoopVo> {
}

View File

@@ -0,0 +1,21 @@
package net.srt.Hadoop.service;
import com.baomidou.mybatisplus.extension.service.IService;
import net.srt.Hadoop.vo.HadoopVo;
import net.srt.Hadoop.vo.request.HadoopRequest;
import net.srt.framework.common.page.PageResult;
import java.util.List;
public interface HadoopService extends IService<HadoopVo> {
PageResult<HadoopVo> hadoopPage(HadoopRequest hadoopRequest);
void add(HadoopVo hadoopVo);
HadoopVo findHadoopVo(Integer id);
void del(List<HadoopVo> hadoopVo);
void upd(HadoopVo hadoopVo);
}

View File

@@ -0,0 +1,62 @@
package net.srt.Hadoop.service.impl;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import lombok.AllArgsConstructor;
import net.srt.Hadoop.convert.HadoopConvert;
import net.srt.Hadoop.mapper.HadoopMapper;
import net.srt.Hadoop.service.HadoopService;
import net.srt.Hadoop.vo.HadoopVo;
import net.srt.Hadoop.vo.request.HadoopRequest;
import net.srt.framework.common.page.PageResult;
import net.srt.framework.mybatis.service.impl.BaseServiceImpl;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.List;
@Service
@AllArgsConstructor
public class HadoopServiceImpl extends BaseServiceImpl<HadoopMapper, HadoopVo> implements HadoopService {
private final HadoopMapper hadoopMapper;
private final HadoopConvert hadoopConvert;
@Override
public PageResult<HadoopVo> hadoopPage(HadoopRequest hadoopRequest) {
IPage<HadoopVo> page = hadoopMapper.selectPage(getPage(hadoopRequest), getWrapper(hadoopRequest));
return new PageResult<>(hadoopConvert.hadoopList(page.getRecords()), page.getTotal());
}
@Override
public void add(HadoopVo hadoopVo) {
// Mirrors FinkServiceImpl#add: stamp the create time, then insert.
hadoopVo.setCreateTime(new Date());
hadoopMapper.insert(hadoopVo);
}
@Override
public HadoopVo findHadoopVo(Integer id) {
return hadoopMapper.selectById(id);
}
@Override
public void del(List<HadoopVo> hadoopVo) {
for (HadoopVo vo : hadoopVo) {
// Delete by primary key, consistent with FinkServiceImpl#del.
hadoopMapper.deleteById(vo.getId());
}
}
@Override
public void upd(HadoopVo hadoopVo) {
// Stamp the update time on modification, as in FinkServiceImpl#upd.
hadoopVo.setUpdateTime(new Date());
hadoopMapper.updateById(hadoopVo);
}
private LambdaQueryWrapper<HadoopVo> getWrapper(HadoopRequest hadoopRequest){
LambdaQueryWrapper<HadoopVo> wrapper = Wrappers.lambdaQuery();
wrapper.like(StrUtil.isNotBlank(hadoopRequest.getName()), HadoopVo::getName, hadoopRequest.getName());
wrapper.like(StrUtil.isNotBlank(hadoopRequest.getAlias()), HadoopVo::getAlias, hadoopRequest.getAlias());
dataScopeWithoutOrgId(wrapper);
return wrapper;
}
}
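Because BaseServiceImpl implements MyBatis-Plus's IService, the row-by-row loop in del could also be one batch call. A sketch of an alternative body, assuming the standard IService API is exposed:

@Override
public void del(List<HadoopVo> hadoopVo) {
    // Collect the ids and issue a single batched delete via IService#removeByIds.
    List<Integer> ids = hadoopVo.stream()
            .map(HadoopVo::getId)
            .collect(java.util.stream.Collectors.toList());
    removeByIds(ids);
}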

View File

@@ -0,0 +1,35 @@
package net.srt.Hadoop.vo;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
@Data
@TableName("hadoop_info")
public class HadoopVo {
@TableId("id")
private Integer id;
private Integer projectId;
private String name;
private String alias;
private String type;
private String configJson;
private Boolean isAvailable;
private String note;
private Boolean enabled;
private Integer version;
private Integer deleted;
private Integer creator;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date createTime;
private Integer updater;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date updateTime;
private String config;
}

View File

@@ -0,0 +1,11 @@
package net.srt.Hadoop.vo.request;
import lombok.Data;
import net.srt.framework.common.query.Query;
@Data
public class HadoopRequest extends Query {
private String name;
private String alias;
}

View File

@@ -0,0 +1,7 @@
auth:
  ignore_urls:
    - /auth/captcha
    - /auth/login
    - /auth/send/code
    - /auth/mobile
    - /upload/**

View File

@@ -0,0 +1,43 @@
server:
  port: 8093
spring:
  mvc:
    servlet:
      load-on-startup: 1
  application:
    name: srt-data-development
  profiles:
    active: dev
  cloud:
    nacos:
      discovery:
        server-addr: 101.34.77.101:8848
        # Namespace (defaults to public)
        namespace: c5d32e76-b83c-4254-8176-1c6a2cee8e3b
        service: ${spring.application.name}
        group: srt2.1
      config:
        server-addr: ${spring.cloud.nacos.discovery.server-addr}
        namespace: ${spring.cloud.nacos.discovery.namespace}
        file-extension: yaml
        # Additional configuration files
        extension-configs:
          - data-id: datasource.yaml
            refresh: true
  servlet:
    multipart:
      # max-file-size must not exceed max-request-size
      max-file-size: 100MB
      max-request-size: 1024MB
storage:
  enabled: true
  config:
    # Storage type: local or aliyun
    type: local
    # Access domain
    domain: http://localhost:8082/sys
    # Access path prefix
    prefix:
    local:
      # Local upload path
      path: D://upload
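The datasource.yaml listed under extension-configs is not part of this module; it is resolved from the same Nacos namespace (and, since no group is given for it, Nacos's default group), and refresh: true lets its values be re-read at runtime without restarting the service.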

View File

@@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- status controls log4j2's own internal logging (OFF silences it; trace is very verbose).
     monitorInterval=600 makes log4j2 re-check this file every 600 seconds and apply any changes. -->
<configuration status="OFF" monitorInterval="600">
    <Properties>
        <property name="LOG_PATH">./logs/</property>
        <property name="LOG_FILE">srt-data-development</property>
    </Properties>
    <!-- Appender definitions -->
    <appenders>
        <!-- Console appender; target selects the output stream (SYSTEM_OUT vs SYSTEM_ERR). -->
        <Console name="Console" target="SYSTEM_OUT">
            <!-- Pass events at "info" and above; deny everything below. -->
            <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="DENY"/>
            <!-- Output pattern; see https://blog.csdn.net/qq_42449963/article/details/104617356 for a breakdown of the conversion specifiers. -->
            <!--<PatternLayout pattern=" %d{yyyy-MM-dd HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>-->
            <PatternLayout
                    pattern=" %d{yyyy-MM-dd HH:mm:ss.SSS} %highlight{%6p} %style{%5pid}{bright,magenta} --- [%15.15t] %style{%c{20}}{bright,cyan}: %m%n"/>
        </Console>
        <!-- Rolling file appender: fileName is the active log file; when a rollover policy
             triggers, its contents are archived to the filePattern location and the active
             file starts fresh. A relative fileName is resolved against the working directory. -->
        <!-- In filePattern, $${date:yyyy-MM} names the archive directory (e.g. 2020-03),
             %d{yyyy-MM-dd} dates each archive, and %i numbers multiple archives within one day. -->
        <RollingFile name="RollingFile" fileName="${LOG_PATH}/${LOG_FILE}.log"
                     filePattern="${LOG_PATH}/$${date:yyyy-MM}/${LOG_FILE}-%d{yyyy-MM-dd}-%i.log">
            <!-- %thread: thread name; %-5level: level padded to 5 chars; %msg: the log message; %class{36}: abbreviated class name. -->
            <PatternLayout pattern=" %d{yyyy-MM-dd HH:mm:ss z} %-5level %class{36} %L %M - %msg%xEx%n"/>
            <!--<SizeBasedTriggeringPolicy size="300MB"/>-->
            <Policies>
                <!-- Time-based rollover: interval is in units of the smallest time field in
                     filePattern (here %d{yyyy-MM-dd}, so interval=1 means one file per day);
                     modulate=true aligns the boundary to the clock (midnight) instead of
                     24 hours after startup. -->
                <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
                <!-- Size-based rollover: archive the active file once it reaches 100MB. -->
                <SizeBasedTriggeringPolicy size="100MB"/>
            </Policies>
        </RollingFile>
    </appenders>
    <loggers>
        <logger name="net.srt.system.dao" level="DEBUG" additivity="false">
            <appender-ref ref="Console"/>
        </logger>
        <!-- Root logger at "info"; appender-ref entries route events to the appenders defined above. -->
        <root level="info">
            <appender-ref ref="Console"/>
            <!-- A plain File appender is generally not used here; kept for reference. -->
            <!--<appender-ref ref="File"/>-->
            <appender-ref ref="RollingFile"/>
        </root>
    </loggers>
</configuration>