Latest version

dev
chenbingxuan 2023-12-28 22:13:54 +08:00
parent 842d1c3b50
commit 83d1971550
18 changed files with 850 additions and 14 deletions

View File

@ -111,7 +111,7 @@ public class DataDatabaseController {
return Result.ok(schemaTableDataVo);
}
@GetMapping("/list-all")
@GetMapping("/list-all/{t}")
@Operation(summary = "获取当前用户所能看到的的数据表")
public Result<List<DataDatabaseVO>> listAll() {
List<DataDatabaseVO> list = dataDatabaseService.listAll();

View File

@ -70,7 +70,7 @@
<if test="contentType != null and contentType.trim() != ''">
AND dsac.content_type = #{contentType}
</if>
<if test="status != ">
<if test="status != null">
AND dsac.status = #{status}
</if>
<if test="sqlDbType != null">

View File

@ -1,13 +1,17 @@
package net.srt.disposition.controller;
import io.swagger.v3.oas.annotations.Operation;
import lombok.AllArgsConstructor;
import net.srt.disposition.dto.DataCheckSqlDto;
import net.srt.disposition.dto.DataSqlDto;
import net.srt.disposition.entity.DataCentre;
import net.srt.disposition.entity.DataCheckSqlEntity;
import net.srt.disposition.entity.DevelopmentOperationalRecordsQuery;
import net.srt.disposition.service.DataCenterService;
import net.srt.disposition.service.DataCheckSqlService;
import net.srt.disposition.vo.DataCheckSqlVo;
import net.srt.disposition.vo.DataSqlVo;
import net.srt.disposition.vo.*;
import net.srt.flink.common.result.SqlExplainResult;
import net.srt.framework.common.page.PageResult;
import net.srt.framework.common.utils.Result;
import org.springframework.web.bind.annotation.*;
@ -21,19 +25,41 @@ public class DataCheckSqlController {
private DataCheckSqlService dataCheckSqlService;
private DataCenterService dataCenterService;
@PostMapping
public Result add(@RequestBody DataCheckSqlDto dataCheckSqlDto){
dataCheckSqlService.add(dataCheckSqlDto);
return Result.ok();
}
@GetMapping("/history/page")
public Result<PageResult<DataCentre>> historyPage(@RequestBody DevelopmentOperationalRecordsQuery query){
PageResult<DataCentre> dataCentrePageResult = dataCenterService.historyPage(query);
return Result.ok(dataCentrePageResult);
}
@DeleteMapping("/history")
@Operation(summary = "运维中心删除")
public Result deleted(@RequestBody List<Long> ids){
dataCenterService.deleted(ids);
return Result.ok();
}
@GetMapping("/env-list")
@Operation(summary = "运维中心删除")
public Result<List<DevelopmentTaskSaveVo>> envList(){
List<DevelopmentTaskSaveVo> developmentTaskSaveVos = dataCenterService.listEnvList();
return Result.ok(developmentTaskSaveVos);
}
@GetMapping("/console-log")
public Result consoleLog(){
return Result.ok();
public Result<LogVo> consoleLog(){
return Result.ok(dataCheckSqlService.getLog());
}
@GetMapping("/clear-log")
public Result clearLog(){
dataCheckSqlService.clearLog();
return Result.ok();
}
@ -43,6 +69,14 @@ public class DataCheckSqlController {
return Result.ok(dataSqlVo);
}
@PostMapping("/execute-sql")
public Result<DataCheckVo> executeSql(@RequestBody DataSqlDto dataSqlDto){
DataCheckVo dataSqlVo=dataCheckSqlService.executeSql(dataSqlDto);
return Result.ok(dataSqlVo);
}
@GetMapping("/{id}")
public Result<DataCheckSqlEntity> get(@PathVariable Integer id) {
DataCheckSqlEntity dataCheckSqlEntity = dataCheckSqlService.find(id);
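
Note: the new execute-sql flow only reads statement, databaseId, jobName, sqlDbType, batchModel, id and databaseList from DataSqlDto, and it resolves the project id from the Authorization header, so a request context must be present. Below is a rough usage sketch under those assumptions; the setter names follow Lombok conventions and are not confirmed by this diff, and the sample connection values are made up.

import java.util.Collections;
import net.srt.disposition.dto.DataSqlDto;
import net.srt.disposition.entity.DataDatabaseDevEntity;
import net.srt.disposition.service.DataCheckSqlService;
import net.srt.disposition.vo.DataCheckVo;
import srt.cloud.framework.dbswitch.common.type.ProductTypeEnum;

class ExecuteSqlUsageSketch {
    // Hypothetical helper showing the payload shape; setters assumed from Lombok @Data.
    static DataCheckVo run(DataCheckSqlService dataCheckSqlService) {
        DataDatabaseDevEntity db = new DataDatabaseDevEntity();
        db.setId(1L);                                         // must equal dto.databaseId below
        db.setDatabaseType(ProductTypeEnum.MYSQL.getIndex()); // only MySQL is handled so far
        db.setJdbcUrl("jdbc:mysql://localhost:3306/demo");
        db.setUserName("root");
        db.setPassword("secret");

        DataSqlDto dto = new DataSqlDto();
        dto.setJobName("demo-query");
        dto.setStatement("SELECT * FROM user_info");
        dto.setDatabaseId(1L);
        dto.setDatabaseList(Collections.singletonList(db));

        return dataCheckSqlService.executeSql(dto);           // what the /execute-sql handler delegates to
    }
}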

View File

@ -20,7 +20,7 @@ public class DataProductionTreeController {
private DataProductionService dataProductionService;
@GetMapping
public Result<List<TreeNodeVo>> listResult(@RequestParam String t){
public Result<List<TreeNodeVo>> listResult(){
List<TreeNodeVo> dispositionVos=dataProductionService.dataTreeList();
return Result.ok(dispositionVos);
}

View File

@ -0,0 +1,18 @@
package net.srt.disposition.convert;
import net.srt.disposition.entity.DataCentre;
import net.srt.disposition.vo.DevelopmentTaskSaveVo;
import org.mapstruct.Mapper;
import org.mapstruct.factory.Mappers;
import java.util.List;
@Mapper
public interface DataCentreConvert {
DataCentreConvert INSTANCE = Mappers.getMapper(DataCentreConvert.class);
List<DataCentre> convertList(List<DataCentre> records);
List<DevelopmentTaskSaveVo> convert(List<DataCentre> entities);
}
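
Reviewer note: MapStruct maps by property name, so convert() fills only the DevelopmentTaskSaveVo fields that also exist on DataCentre (projectId, databaseId, clusterId, sqlDbType, statement, version, creator, createTime and so on) and converts types where they differ, e.g. the entity's Integer sqlDbType becomes the VO's String. A minimal sketch of that behaviour:

import java.util.Collections;
import net.srt.disposition.convert.DataCentreConvert;
import net.srt.disposition.entity.DataCentre;
import net.srt.disposition.vo.DevelopmentTaskSaveVo;

class DataCentreConvertSketch {
    static DevelopmentTaskSaveVo example() {
        DataCentre entity = new DataCentre();
        entity.setProjectId(7L);
        entity.setSqlDbType(1);
        entity.setJobName("demo-job");
        DevelopmentTaskSaveVo vo = DataCentreConvert.INSTANCE
                .convert(Collections.singletonList(entity)).get(0);
        // vo.getProjectId() == 7L and vo.getSqlDbType() equals "1";
        // vo.getName() stays null because DataCentre calls that field jobName.
        return vo;
    }
}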

View File

@ -12,7 +12,7 @@ import java.util.Date;
public class DataCentre {
private Long id;
private Long projectId;
private String sqlDbType;
private Integer sqlDbType;
private Long databaseId;
private Long clusterId;
private Long clusterConfigurationId;
@ -53,6 +53,6 @@ public class DataCentre {
private String executeSql;
private String executeNo;
private String jib;
private String duration;
private Integer duration;
}

View File

@ -23,8 +23,10 @@ public class DataDatabaseDevEntity {
private String databasePort;
private Integer databaseType;
private Integer deleted;
private Integer id;
private Long id;
private String isJdbc;
private String isRtApprove;
private String jdbcUrl;
private String name;
private String noReReason;
private String password;

View File

@ -0,0 +1,71 @@
package net.srt.disposition.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import net.srt.framework.common.query.Query;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
@Data
@EqualsAndHashCode(callSuper = false)
@Schema(description = "数据开发-运维中心查询")
public class DevelopmentOperationalRecordsQuery extends Query {
@Schema(description = "调度节点记录唯一标识符")
private Long nodeRecordId;
@Schema(description = "记录标识符")
private Long recordId;
@Schema(description = "任务唯一标识符")
private Long taskId;
@Schema(description = "作业名称")
private String jobName;
@Schema(description = "执行状态")
private Integer status;
@Schema(description = "实例状态")
private String instanceStatus;
@Schema(description = "方言")
private Integer dialect;
@Schema(description = "类型")
private String type;
@Schema(description = "SQL数据库类型")
private String sqlDbType;
@Schema(description = "数据库唯一标识符")
private Long databaseId;
@Schema(description = "集群唯一标识符")
private Integer clusterId;
@Schema(description = "集群配置唯一标识符")
private Long clusterConfigurationId;
@Schema(description = "开始时间")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date startTime;
@Schema(description = "结束时间")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date endTime;
@Schema(description = "执行完成时间戳")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date finishTime;
}
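
Since historyPage receives this query as a JSON body, Jackson's @JsonFormat settings (pattern yyyy-MM-dd HH:mm:ss, timezone GMT+8) govern how the three timestamps are parsed. A minimal sketch of the accepted wire format:

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

class QueryTimestampSketch {
    // Mirrors the pattern and timezone declared on startTime / endTime / finishTime.
    static Date parse(String value) throws ParseException {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        fmt.setTimeZone(TimeZone.getTimeZone("GMT+8"));
        return fmt.parse(value);   // e.g. "2023-12-28 00:00:00"
    }
}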

View File

@ -0,0 +1,7 @@
package net.srt.disposition.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import net.srt.disposition.entity.DataCentre;
public interface DataCentreMapper extends BaseMapper<DataCentre> {
}

View File

@ -0,0 +1,19 @@
package net.srt.disposition.service;
import com.baomidou.mybatisplus.extension.service.IService;
import net.srt.disposition.entity.DataCentre;
import net.srt.disposition.entity.DevelopmentOperationalRecordsQuery;
import net.srt.disposition.vo.DevelopmentTaskSaveVo;
import net.srt.framework.common.page.PageResult;
import java.util.List;
public interface DataCenterService extends IService<DataCentre> {
PageResult<DataCentre> historyPage(DevelopmentOperationalRecordsQuery query);
void deleted(List<Long> ids);
List<DevelopmentTaskSaveVo> listEnvList();
}

View File

@ -3,10 +3,14 @@ package net.srt.disposition.service;
import com.baomidou.mybatisplus.extension.service.IService;
import net.srt.disposition.dto.DataCheckSqlDto;
import net.srt.disposition.dto.DataSqlDto;
import net.srt.disposition.entity.DataCentre;
import net.srt.disposition.entity.DataCheckSqlEntity;
import net.srt.disposition.vo.DataCheckSqlVo;
import net.srt.disposition.vo.DataCheckVo;
import net.srt.disposition.vo.DataSqlVo;
import net.srt.disposition.vo.LogVo;
import net.srt.flink.common.result.SqlExplainResult;
import net.srt.framework.common.page.PageResult;
import java.util.List;
@ -16,4 +20,13 @@ public interface DataCheckSqlService extends IService<DataCheckSqlEntity> {
void add(DataCheckSqlDto dataCheckSqlDto);
List<SqlExplainResult> explainSql(DataSqlDto dataSqlDto);
DataCheckVo executeSql(DataSqlDto dataSqlDto);
LogVo getLog();
void clearLog();
}

View File

@ -0,0 +1,68 @@
package net.srt.disposition.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import lombok.AllArgsConstructor;
import net.srt.disposition.convert.DataCentreConvert;
import net.srt.disposition.entity.DataCentre;
import net.srt.disposition.entity.DevelopmentOperationalRecordsQuery;
import net.srt.disposition.mapper.DataCentreMapper;
import net.srt.disposition.service.DataCenterService;
import net.srt.disposition.vo.DevelopmentTaskSaveVo;
import net.srt.framework.common.page.PageResult;
import net.srt.framework.mybatis.service.impl.BaseServiceImpl;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
@AllArgsConstructor
public class DataCenterServiceImpl extends BaseServiceImpl<DataCentreMapper, DataCentre> implements DataCenterService {
@Override
public PageResult<DataCentre> historyPage(DevelopmentOperationalRecordsQuery query) {
IPage<DataCentre> page=baseMapper.selectPage(getPage(query),getWrapper(query));
return new PageResult<>(DataCentreConvert.INSTANCE.convertList(page.getRecords()),page.getTotal());
}
@Override
public void deleted(List<Long> ids) {
// removeByIds 已按主键批量删除,无需再逐条删除
removeByIds(ids);
}
@Override
public List<DevelopmentTaskSaveVo> listEnvList() {
LambdaQueryWrapper<DataCentre> wrapper = Wrappers.lambdaQuery();
List<DataCentre> entities = baseMapper.selectList(wrapper);
List<DevelopmentTaskSaveVo> developmentTaskSaveVos = DataCentreConvert.INSTANCE.convert(entities);
return developmentTaskSaveVos;
}
private LambdaQueryWrapper<DataCentre> getWrapper(DevelopmentOperationalRecordsQuery query) {
LambdaQueryWrapper<DataCentre> wrapper = Wrappers.lambdaQuery();
wrapper.eq(query.getNodeRecordId()!=null,DataCentre::getScheduleNodeRecordId,query.getNodeRecordId());
wrapper.eq(query.getTaskId()!=null,DataCentre::getTaskId,query.getTaskId());
wrapper.like(StringUtils.isNotBlank(query.getJobName()),DataCentre::getJobName,query.getJobName());
wrapper.eq(query.getStatus()!=null,DataCentre::getStatus,query.getStatus());
wrapper.eq(query.getInstanceStatus()!=null,DataCentre::getInstanceStatus,query.getInstanceStatus());
wrapper.eq(query.getDialect()!=null,DataCentre::getDialect,query.getDialect());
wrapper.eq(StringUtils.isNotBlank(query.getSqlDbType()),DataCentre::getSqlDbType,query.getSqlDbType());
wrapper.eq(query.getDatabaseId()!=null,DataCentre::getDatabaseId,query.getDatabaseId());
wrapper.eq(query.getClusterId()!=null,DataCentre::getClusterId,query.getClusterId());
wrapper.eq(query.getClusterConfigurationId()!=null,DataCentre::getClusterConfigurationId,query.getClusterConfigurationId());
wrapper.gt(query.getStartTime()!=null,DataCentre::getStartTime,query.getStartTime());
wrapper.lt(query.getEndTime()!=null,DataCentre::getEndTime,query.getEndTime());
wrapper.eq(query.getFinishTime()!=null,DataCentre::getFinishTime,query.getFinishTime());
return wrapper;
}
}
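
For readers new to MyBatis-Plus: the boolean-first overloads used in getWrapper add their predicate only when the first argument is true, so null or blank filters simply drop out of the generated SQL. A small illustration with made-up filter values:

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import net.srt.disposition.entity.DataCentre;
import net.srt.disposition.entity.DevelopmentOperationalRecordsQuery;

class ConditionalWrapperSketch {
    static LambdaQueryWrapper<DataCentre> build() {
        DevelopmentOperationalRecordsQuery query = new DevelopmentOperationalRecordsQuery();
        query.setJobName("nightly-sync");   // only these two filters are set
        query.setStatus(1);

        LambdaQueryWrapper<DataCentre> wrapper = Wrappers.lambdaQuery();
        wrapper.like(StringUtils.isNotBlank(query.getJobName()), DataCentre::getJobName, query.getJobName()); // added
        wrapper.eq(query.getStatus() != null, DataCentre::getStatus, query.getStatus());                      // added
        wrapper.eq(query.getDatabaseId() != null, DataCentre::getDatabaseId, query.getDatabaseId());          // skipped, databaseId is null
        // effective WHERE clause: job_name LIKE '%nightly-sync%' AND status = 1
        return wrapper;
    }
}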

View File

@ -6,11 +6,17 @@ import lombok.AllArgsConstructor;
import net.srt.disposition.convert.DataCheckSqlConvert;
import net.srt.disposition.dto.DataCheckSqlDto;
import net.srt.disposition.dto.DataSqlDto;
import net.srt.disposition.entity.DataCentre;
import net.srt.disposition.entity.DataCheckSqlEntity;
import net.srt.disposition.entity.DataDatabaseDevEntity;
import net.srt.disposition.entity.DataProductionTreeEntity;
import net.srt.disposition.mapper.DataCentreMapper;
import net.srt.disposition.mapper.DataCheckSqlMapper;
import net.srt.disposition.mapper.DataProductionMapper;
import net.srt.disposition.service.DataCheckSqlService;
import net.srt.disposition.vo.DataCheckVo;
import net.srt.disposition.vo.LogVo;
import net.srt.disposition.vo.Result;
import net.srt.flink.common.result.SqlExplainResult;
import net.srt.flink.common.utils.LogUtil;
import net.srt.flink.process.context.ProcessContextHolder;
@ -18,20 +24,29 @@ import net.srt.flink.process.model.ProcessEntity;
import net.srt.flink.process.model.ProcessStatus;
import net.srt.flink.process.model.ProcessStep;
import net.srt.flink.process.model.ProcessType;
import net.srt.flink.process.pool.ConsolePool;
import net.srt.framework.mybatis.service.impl.BaseServiceImpl;
import net.srt.framework.security.cache.TokenStoreCache;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import srt.cloud.framework.dbswitch.common.type.ProductTypeEnum;
import javax.servlet.http.HttpServletRequest;
import java.sql.*;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.*;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Service
@AllArgsConstructor
public class DataCheckSqlServiceImpl extends BaseServiceImpl<DataCheckSqlMapper, DataCheckSqlEntity> implements DataCheckSqlService {
private DataProductionMapper dataProductionMapper;
private TokenStoreCache storeCache;
private DataCentreMapper dataCentreMapper;
private HttpServletRequest request;
@Override
public DataCheckSqlEntity find(Integer id) {
DataCheckSqlEntity dataCheckSqlEntity = baseMapper.selectById(id);
@ -91,6 +106,251 @@ public class DataCheckSqlServiceImpl extends BaseServiceImpl<DataCheckSqlMapper,
return sqlExplainResults;
}
@Override
public DataCheckVo executeSql(DataSqlDto dataSqlDto) {
DataCheckVo dataCheckVo=null;
try {
dataCheckVo = selectColumns(dataSqlDto);
dataCheckVo.setEndTime(new Date());
//构建运维数据
DataCentre operationalRecordsEntity= getDataCentre(dataCheckVo,dataSqlDto);
//添加到运维作业表中
dataCentreMapper.insert(operationalRecordsEntity);
} catch (Exception e) {
throw new RuntimeException(e);
}
return dataCheckVo;
}
private DataCentre getDataCentre(DataCheckVo exeCuteSql, DataSqlDto dto) {
DataCentre operationalRecords = new DataCentre();
//获取租户id\项目id
Long projectId = storeCache.getProjectId(getAccessTokenFromRequest());
operationalRecords.setProjectId(projectId);
operationalRecords.setSqlDbType(dto.getSqlDbType());
operationalRecords.setDatabaseId(dto.getDatabaseId());
operationalRecords.setJobName(dto.getJobName());
operationalRecords.setStatement(dto.getStatement());
operationalRecords.setExecuteType(dto.getBatchModel());
Result result = exeCuteSql.getResult();
List<Result> results = result.getResults();
StringBuilder resultText = new StringBuilder();
for (Result resultsDTO : results) {
resultText.append(resultsDTO.getRowData().toString());
}
operationalRecords.setResult(resultText.toString());
operationalRecords.setEndTime(new Date());
operationalRecords.setStartTime(new Date());
operationalRecords.setTaskId(dto.getId());
operationalRecords.setCreateTime(new Date());
operationalRecords.setExecuteNo(UUID.randomUUID().toString());
operationalRecords.setDuration(0);
return operationalRecords;
}
private String getAccessTokenFromRequest() {
String accessToken = request.getHeader("Authorization");
if (StringUtils.isBlank(accessToken)) {
accessToken = request.getParameter("access_token");
}
return accessToken;
}
public DataCheckVo selectColumns(DataSqlDto dto) throws Exception{
//获取数据库信息
List<DataDatabaseDevEntity> databaseList = dto.getDatabaseList();
DataDatabaseDevEntity databaseVO = null;
for (DataDatabaseDevEntity dataDatabaseVO : databaseList) {
if (dto.getDatabaseId().equals(dataDatabaseVO.getId())) {
databaseVO = dataDatabaseVO;
break;
}
}
if (databaseVO == null) {
throw new Exception("未找到id为 " + dto.getDatabaseId() + " 的数据库连接信息");
}
//日志对象构建
ProcessEntity process = buildProcess(dto);
DataCheckVo exeCuteSql = new DataCheckVo();
List<SqlExplainResult> sqlExplainResults = new ArrayList<>();
String current = null;
try {
// 记录SQL验证开始的日志
process.info("Start execute sql...");
if(databaseVO.getDatabaseType().equals(ProductTypeEnum.MYSQL.getIndex())){
String className = ProductTypeEnum.MYSQL.getDriveClassName();
//1. 注册驱动
try {
Class.forName(className);
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
// 获取数据库连接
try (Connection conn = DriverManager.getConnection(databaseVO.getJdbcUrl(), databaseVO.getUserName(), databaseVO.getPassword())) {
// 获取表名
String dtoStatement = dto.getStatement();
String tableName = getTableName(dtoStatement);
// 获取数据库操作对象
try (Statement statement = conn.createStatement()) {
String sql = "SHOW COLUMNS FROM " + tableName;
// 获取列名并存储在一个列表中
List<String> columnNames = new ArrayList<>();
try (ResultSet selectColumnNames = statement.executeQuery(sql)) {
while (selectColumnNames.next()) {
String columnName = selectColumnNames.getString("Field");
columnNames.add(columnName);
System.out.println("列名: " + columnName);
}
}
// 获取表中数据
try (ResultSet row = statement.executeQuery(dtoStatement)) {
// 创建集合用于存储查询结果
List<Map<String, Object>> resultList = new ArrayList<>();
// 处理查询结果
while (row.next()) {
// 创建一个 Map 用于存储每一行的数据
Map<String, Object> rowData = new HashMap<>();
// 遍历字段名列表,获取每个字段对应的值,并存储到 Map 中
for (String columnName : columnNames) {
Object value = row.getObject(columnName);
rowData.put(columnName, value);
}
// 将 Map 添加到集合中
resultList.add(rowData);
}
Result resultsDTO = buildResultsDTO(columnNames, resultList, dto);
exeCuteSql.setResult(resultsDTO);
exeCuteSql.setSuccess(true);
}
// 使用SQLUtils将输入的SQL解析为SQL语句列表
List<SQLStatement> stmtList = SQLUtils.parseStatements(dto.getStatement(), "mysql");
// 遍历列表中的每个SQL语句
for (SQLStatement item : stmtList) {
// 设置当前正在处理的SQL语句以便记录日志
current = item.toString();
// 获取SQL语句的类型例如SELECT、INSERT并添加到结果列表中
String type = item.getClass().getSimpleName();
sqlExplainResults.add(SqlExplainResult.success(type, current, null));
}
process.info("Execute sql succeed.");
}
} catch (SQLException e) {
// 如果在SQL解析过程中发生异常将失败的结果添加到列表中
sqlExplainResults.add(SqlExplainResult.fail(current, LogUtil.getError(e)));
String error = LogUtil.getError(e);
// 记录错误消息
process.error(error);
}
}else{
throw new Exception("目前只支持Mysql类型");
}
process.infoEnd();
} catch (Exception e) {
// 如果在SQL解析过程中发生异常将失败的结果添加到列表中
sqlExplainResults.add(SqlExplainResult.fail(current, LogUtil.getError(e)));
String error = LogUtil.getError(e);
// 记录错误消息
process.error(error);
}
return exeCuteSql;
}
private Result buildResultsDTO(List<String> columnNames, List<Map<String, Object>> resultList, DataSqlDto dto) {
long startMillis = System.currentTimeMillis();
Result resultsDTO = new Result();
String dtoStatement = dto.getStatement();
List<Result> result = new ArrayList<>();
Result resultsDTO1 = new Result();
resultsDTO1.setColumns(columnNames);
resultsDTO1.setRowData(resultList);
resultsDTO1.setCount(resultList.size());
resultsDTO1.setSql(dtoStatement);
resultsDTO1.setSuccess(true);
resultsDTO1.setJobId(dto.getId());
long take = System.currentTimeMillis() - startMillis;
resultsDTO1.setTime(take);
resultsDTO1.setIfQuery(true);
result.add(resultsDTO1);
resultsDTO.setResults(result);
resultsDTO.setCount(resultList.size());
resultsDTO.setSql(dtoStatement);
resultsDTO.setSuccess(true);
resultsDTO.setJobId(dto.getId());
long take1 = System.currentTimeMillis() - startMillis;
resultsDTO.setTime(take1);
return resultsDTO;
}
private String getTableName(String dtoStatement) {
// 使用正则表达式匹配表名
Pattern pattern = Pattern.compile("from\\s+([a-zA-Z_][a-zA-Z0-9_]*)", Pattern.CASE_INSENSITIVE);
Matcher matcher = pattern.matcher(dtoStatement);
String tableName ="";
// 查找匹配
if (matcher.find()) {
// 获取匹配的表名
tableName = matcher.group(1);
System.out.println("Table Name: " + tableName);
} else {
System.out.println("Table name not found.");
}
return tableName;
}
@Override
public LogVo getLog() {
LogVo logVo = new LogVo();
ConsolePool instance = ConsolePool.getInstance();
if (instance.exist(getAccessToken())) {
logVo.setLog(instance.get(getAccessToken()).toString());
logVo.setEnd(true);
}
return logVo;
}
@Override
public void clearLog() {
ConsolePool instance = ConsolePool.getInstance();
instance.remove(getAccessToken());
}
private ProcessEntity buildProcess(DataSqlDto dto) {
// 从上下文获取当前进程实体
ProcessEntity process = ProcessContextHolder.getProcess();
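
A note on getTableName above: the pattern only captures a bare identifier after FROM, so schema-qualified or backtick-quoted table names come back truncated or empty before being fed into SHOW COLUMNS FROM. A small self-contained check (the sample SQL strings are made up):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class TableNameRegexSketch {
    // Reproduces the pattern used by getTableName to show what it captures.
    private static final Pattern FROM_TABLE =
            Pattern.compile("from\\s+([a-zA-Z_][a-zA-Z0-9_]*)", Pattern.CASE_INSENSITIVE);

    static String firstTable(String sql) {
        Matcher m = FROM_TABLE.matcher(sql);
        return m.find() ? m.group(1) : "";
    }

    public static void main(String[] args) {
        System.out.println(firstTable("SELECT * FROM user_info WHERE id = 1")); // user_info
        System.out.println(firstTable("select name from demo.user_info"));      // demo (schema prefix, cut at the dot)
        System.out.println(firstTable("SELECT * FROM `order`"));                // empty, backticks are not matched
    }
}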

View File

@ -0,0 +1,28 @@
package net.srt.disposition.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
@Data
public class DataCheckVo {
private Integer id;
private String jobConfig;
private String jobManagerAddress;
private Integer status;
private boolean success;
private String statement;
private Integer jobId;
private Integer jobInstanceId;
private String error;
private Result result;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date startTime;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date endTime;
private String log;
}

View File

@ -0,0 +1,141 @@
package net.srt.disposition.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
/**
* @author : WangZhanpeng
* @date : 2023/12/27 19:14
*/
@Data
@Schema(description = "数据开发-运维中心")
public class DevelopmentOperationalRecordsVo {
@Schema(description = "主键")
private Long id;
@Schema(description = "所属项目唯一标识符")
private Long projectId;
@Schema(description = "SQL数据库类型")
private String sqlDbType;
@Schema(description = "数据库唯一标识符")
private Long databaseId;
@Schema(description = "集群唯一标识符")
private Integer clusterId;
@Schema(description = "集群配置唯一标识符")
private Long clusterConfigurationId;
@Schema(description = "会话信息")
private String session;
@Schema(description = "作业唯一标识符")
private Long jobId;
@Schema(description = "作业名称")
private String jobName;
@Schema(description = "作业管理器地址")
private String jobManagerAddress;
@Schema(description = "执行状态")
private Integer status;
@Schema(description = "方言")
private Integer dialect;
@Schema(description = "类型")
private String type;
@Schema(description = "SQL语句")
private String statement;
@Schema(description = "错误信息")
private String error;
@Schema(description = "执行结果")
private String result;
@Schema(description = "配置JSON字符串")
private String configJson;
@Schema(description = "开始时间")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date startTime;
@Schema(description = "结束时间")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date endTime;
@Schema(description = "任务唯一标识符")
private Long taskId;
@Schema(description = "执行类型")
private String executeType;
@Schema(description = "调度唯一标识符")
private Long scheduleId;
@Schema(description = "调度节点唯一标识符")
private Long scheduleNodeId;
@Schema(description = "调度记录唯一标识符")
private Long scheduleRecordId;
@Schema(description = "调度节点记录唯一标识符")
private Long scheduleNodeRecordId;
@Schema(description = "实例状态")
private String instanceStatus;
@Schema(description = "执行完成时间戳")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date finishTime;
@Schema(description = "执行的SQL语句")
private String executeSql;
@Schema(description = "执行编号")
private String executeNo;
@Schema(description = "作业标识符")
private String jid;
@Schema(description = "执行持续时间")
private Integer duration;
@Schema(description = "实体的版本")
private Integer version;
@Schema(description = "表示实体是否已删除的标志")
private Integer deleted;
@Schema(description = "创建者的ID")
private Integer creator;
@Schema(description = "实体的创建时间")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date createTime;
@Schema(description = "更新者的ID")
private Integer updater;
@Schema(description = "实体的更新时间")
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date updateTime;
}

View File

@ -0,0 +1,143 @@
package net.srt.disposition.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import net.srt.framework.common.utils.DateUtils;
import java.io.Serializable;
import java.util.Date;
/**
* @author : WangZhanpeng
* @date : 2023/12/24 19:59
*/
@Data
@Schema(description = "数据开发-sql作业")
public class DevelopmentTaskSaveVo implements Serializable {
@Schema(description = "唯一标识")
private Long id;
@Schema(description = "警报组标识")
private Long alertGroupId;
@Schema(description = "别名")
private String alias;
@Schema(description = "批处理模型")
private String batchModel;
@Schema(description = "目录标识")
private Long catalogueId;
@Schema(description = "检查点")
private String checkPoint;
@Schema(description = "集群配置标识")
private Long clusterConfigurationId;
@Schema(description = "集群标识")
private Long clusterId;
@Schema(description = "配置 JSON")
private String configJson;
@Schema(description = "数据库标识")
private Long databaseId;
@Schema(description = "方言")
private String dialect;
@Schema(description = "是否启用")
private String enabled;
@Schema(description = "环境标识")
private String envId;
@Schema(description = "片段")
private String fragment;
@Schema(description = "JAR标识")
private Long jarId;
@Schema(description = "作业实例标识")
private Long jobInstanceId;
@Schema(description = "名称")
private String name;
@Schema(description = "备注")
private String note;
@Schema(description = "开放传输")
private String openTrans;
@Schema(description = "并行度")
private Integer parallelism;
@Schema(description = "处理结束时间")
private String processEnd;
@Schema(description = "项目标识")
private Long projectId;
@Schema(description = "PV数据数量")
private Integer pvdataNum;
@Schema(description = "保存点路径")
private String savePointPath;
@Schema(description = "保存点策略")
private String savePointStrategy;
@Schema(description = "SQL数据库类型")
private String sqlDbType;
@Schema(description = "语句")
private String statement;
@Schema(description = "语句集")
private String statementSet;
@Schema(description = "步骤")
private String step;
@Schema(description = "类型")
private String type;
@Schema(description = "是否使用自动取消")
private String useAutoCancel;
@Schema(description = "是否使用变更日志")
private String useChangeLog;
@Schema(description = "是否使用结果")
private String useResult;
@Schema(description = "版本标识")
private Long versionId;
@Schema(description = "版本")
private Integer version;
@Schema(description = "已删除")
private String deleted;
@Schema(description = "创建者")
private Integer creator;
@Schema(description = "创建时间")
@JsonFormat(pattern = DateUtils.DATE_TIME_PATTERN)
private Date createTime;
@Schema(description = "更新者")
private Integer updater;
@Schema(description = "更新时间")
@JsonFormat(pattern = DateUtils.DATE_TIME_PATTERN)
private Date updateTime;
}

View File

@ -0,0 +1,9 @@
package net.srt.disposition.vo;
import lombok.Data;
@Data
public class LogVo {
private String log;
private boolean end;
}

View File

@ -0,0 +1,23 @@
package net.srt.disposition.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
@Data
public class Result {
private List<Result> results;
private boolean ifQuery;
private String sql;
private Long time;
private boolean success;
private String errorMsg;
private Integer count;
private List<String> columns;
private List<Map<String, Object>> rowData;
private Long jobId;
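
Worth noting: Result is self-referential, acting both as the outer envelope stored on DataCheckVo and as the per-statement entry inside results; buildResultsDTO above produces exactly one inner entry. A minimal sketch of the resulting shape:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import net.srt.disposition.vo.Result;

class ResultShapeSketch {
    static Result example() {
        Map<String, Object> row = new HashMap<>();
        row.put("id", 1);
        row.put("name", "demo");

        Result inner = new Result();                 // one entry per executed statement
        inner.setColumns(Arrays.asList("id", "name"));
        inner.setRowData(Collections.singletonList(row));
        inner.setCount(1);
        inner.setSuccess(true);

        Result outer = new Result();                 // envelope stored on DataCheckVo.result
        outer.setResults(Collections.singletonList(inner));
        outer.setCount(1);
        outer.setSuccess(true);
        return outer;
    }
}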
}