feat: rule engine (optimization)

master_fei
Yunfei Du 2024-05-16 21:34:05 +08:00
parent a48cd106b6
commit fda9486485
18 changed files with 2 additions and 396 deletions

View File

@@ -1,8 +1,6 @@
package com.etl.data.client.config;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Import;
import org.springframework.http.converter.json.GsonBuilderUtils;
import org.springframework.stereotype.Component;
/**
@@ -13,4 +11,5 @@ import org.springframework.stereotype.Component;
@Component
@Import ( value = {DataAccessClientRunner.class})
public class DataAccessClientConfig {
}

View File

@@ -1,23 +1,15 @@
package com.etl.data.client.config;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.etl.data.client.connPool.service.ConnPoolManagementService;
import com.etl.data.client.jdbcUtils.JDBCConcreteClass;
import com.etl.data.domain.DataSource;
import com.etl.data.domain.dataSource.DataSourceConfig;
import com.etl.data.domain.req.DataSourceQueryReq;
import com.etl.data.source.remote.RemoteDataSourceService;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSetMetaData;
import java.util.List;
import java.util.Map;
/**
* Data access client runner
@@ -40,7 +32,6 @@ public class DataAccessClientRunner implements ApplicationRunner {
try {
DruidPooledConnection druidPooledConnection = ConnPoolManagementService.get ( dataSourceList.get ( 0 ).getKey ( ) );
// Map< DruidPooledConnection, String > connToKey = ConnPoolManagementService.getConnToKey ( );
log.info ( "connToKey:{}", druidPooledConnection );
@@ -50,6 +41,7 @@ public class DataAccessClientRunner implements ApplicationRunner {
// for (int i = 1; i <= rsd.getColumnCount ( ); i++) {
// log.info ( "类型:{}", rsd.getColumnClassName ( i ) );
// }
ConnPoolManagementService.returnConnection ( druidPooledConnection );
} catch (Exception e) {
log.error ( "数据访问客户端运行程序异常:{}", e.getMessage ( ) );

View File

@@ -3,7 +3,6 @@ package com.etl.data.client.connPool.pool;
import com.alibaba.druid.pool.DruidDataSource;
import com.etl.common.core.exception.ServiceException;
import com.etl.common.core.utils.StringUtils;
import com.etl.common.security.utils.SecurityUtils;
import lombok.extern.log4j.Log4j2;
import java.util.concurrent.ConcurrentHashMap;

View File

@@ -1,11 +1,8 @@
package com.etl.data.client.jdbcUtils;
import org.springframework.stereotype.Component;
import javax.xml.bind.annotation.XmlAccessorOrder;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
/**
* JDBC utility class

View File

@@ -4,7 +4,6 @@ import lombok.extern.log4j.Log4j2;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**

View File

@@ -1,84 +0,0 @@
package com.etl.data.domain.dataSource;
import com.alibaba.druid.pool.DruidDataSource;
import com.etl.data.domain.DataSource;
import lombok.extern.log4j.Log4j2;
import org.springframework.stereotype.Component;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
/**
*
* @author YunFei.Du
* @date 15:23 2024/5/13
*/
@Component
@Log4j2
public class MyDataSource {
private static HashMap<String, DruidDataSource > polls=new HashMap<> ( );
/**
* Build and register a Druid connection pool for each configured data source.
*/
public static void info(List< DataSource> dataSourceList){
for (DataSource dataSource : dataSourceList) {
String jdbUrl ="jdbc:"+dataSource.getJdbcDriver()+"://"+dataSource.getHost()+":"+dataSource.getPort()+"/"+dataSource.getDatabaseName();
String key=dataSource.getId ()+"_"+dataSource.getName ();
// Create a Druid database connection pool
DruidDataSource druidSource = new DruidDataSource ( );
druidSource.setUrl ( jdbUrl );
druidSource.setUsername ( dataSource.getUsername () );
druidSource.setPassword ( dataSource.getPassword () );
druidSource.setInitialSize ( Integer.valueOf ( dataSource.getInitNum () ) );
druidSource.setMinIdle ( Integer.valueOf ( dataSource.getMaxNum ()) );
druidSource.setMaxActive ( Integer.valueOf ( dataSource.getMaxNum ()) );
try {
druidSource.init ();
} catch (SQLException e) {
throw new RuntimeException ( e );
}
addDataSource ( key,druidSource );
}
}
private static void addDataSource(String key, DruidDataSource druidSource) {
if (!polls.containsKey ( key )){
polls.put ( key,druidSource );
}
}
/**
* Obtain a connection from the registered pool via getConnection()
* @param key pool key (data source id + "_" + name)
* @return a pooled connection, or null if no pool is registered for the key
*/
public static Connection getConnection(String key) {
// Look up the connection pool by the given key
DruidDataSource druidDataSource = polls.get ( key );
if (druidDataSource!=null){
try {
return druidDataSource.getConnection ();
} catch (SQLException e) {
throw new RuntimeException ( e );
}
}
return null;
}
/**
* Release the connection back to its pool.
*/
public static void close(Connection connection) {
if (connection!=null){
try {
connection.close ();
} catch (SQLException e){
log.error ( "释放连接失败" );
}
}
}
}
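
For context, a minimal usage sketch of the removed MyDataSource registry. The info/getConnection/close calls and the key format come from the deleted file above; the demo class MyDataSourceUsageSketch and the way dataSourceList is obtained are illustrative assumptions, not part of this commit.

package com.etl.data.domain.dataSource;

import com.etl.data.domain.DataSource;
import java.sql.Connection;
import java.util.List;

// Hypothetical demo: how the removed MyDataSource registry was intended to be used.
// dataSourceList is assumed to be loaded elsewhere (e.g. via RemoteDataSourceService,
// as DataAccessClientRunner does).
public class MyDataSourceUsageSketch {
    static void run(List<DataSource> dataSourceList) {
        // Build and register one Druid pool per data source (key = id + "_" + name).
        MyDataSource.info(dataSourceList);
        DataSource first = dataSourceList.get(0);
        String key = first.getId() + "_" + first.getName();
        // Borrow a connection from the pool registered under that key.
        Connection connection = MyDataSource.getConnection(key);
        try {
            // ... run JDBC work with the connection ...
        } finally {
            // close() releases the connection back to its pool.
            MyDataSource.close(connection);
        }
    }
}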

View File

@@ -1,38 +0,0 @@
package com.etl.rule.engine.scope;
import com.etl.rule.engine.scope.model.DataProcessModel;
import org.springframework.stereotype.Component;
/**
* @Author: YunFei.Du
* @date: 2024/4/29
* @Description:
* @Version: 1.0
*/
@Component
public class DataModelContext implements ScopeContext <DataProcessModel>{
private static final ThreadLocal<DataProcessModel> THREAD_LOCAL = new ThreadLocal<>();
private final RecordContext recordContext;
public DataModelContext (RecordContext recordContext) {
this.recordContext = recordContext;
}
@Override
public DataProcessModel get () {
return THREAD_LOCAL.get();
}
@Override
public void set (DataProcessModel dataProcessModel) {
THREAD_LOCAL.set(dataProcessModel);
}
@Override
public void clear(){
THREAD_LOCAL.remove();
}
}

View File

@@ -1,47 +0,0 @@
package com.etl.rule.engine.scope;
import com.etl.rule.engine.Engine;
import com.etl.rule.engine.model.DataModel;
import com.etl.rule.engine.scope.model.DataProcessModel;
/**
* @Author: YunFei.Du
* @date: 2024/5/6
* @Description:
* @Version: 1.0
*/
public abstract class DataModelEngine implements Engine<DataProcessModel> {
private DataModelContext dataModelContext;
@Override
public DataProcessModel get (){
return dataModelContext.get();
}
public DataModel getModel(){
return get().getDataModel();
}
public String getKey () {
return getModel().getKey();
}
public Object getValue () {
return getModel().getValue();
}
public String getSourceType () {
return getModel().getSourceType();
}
public String getProcessType () {
return getModel().getProcessType();
}
public Class<?> getProcessClass () {
return getModel().getProcessClass();
}
}

View File

@@ -1,38 +0,0 @@
package com.etl.rule.engine.scope;
import com.etl.rule.engine.scope.model.DataSetProcessModel;
import org.springframework.stereotype.Component;
/**
* @Author: YunFei.Du
* @date: 2024/4/29
* @Description:
* @Version: 1.0
*/
@Component
public class DataSetContext implements ScopeContext <DataSetProcessModel>{
private static final ThreadLocal<DataSetProcessModel> THREAD_LOCAL = new ThreadLocal<>();
private final TaskContext taskContext;
public DataSetContext (TaskContext taskContext) {
this.taskContext = taskContext;
}
@Override
public DataSetProcessModel get() {
return THREAD_LOCAL.get();
}
@Override
public void clear() {
THREAD_LOCAL.remove();
}
@Override
public void set(DataSetProcessModel dataSetProcessModel) {
THREAD_LOCAL.set(dataSetProcessModel);
}
}

View File

@@ -1,30 +0,0 @@
package com.etl.rule.engine.scope;
import com.etl.rule.engine.Engine;
import com.etl.rule.engine.model.DataSetModel;
import com.etl.rule.engine.scope.model.DataSetProcessModel;
/**
*
* @author YunFei.Du
* @date 8:52 2024/5/15
*/
public abstract class DataSetEngine implements Engine<DataSetProcessModel> {
private DataSetContext dataSetContext;
@Override
public DataSetProcessModel get() {
return dataSetContext.get();
}
public DataSetModel getDataSetModel() {
return get().getDataSetModel();
}
public DataSetContext getDataSetContext() {
return dataSetContext;
}
public void setDataSetContext(DataSetContext dataSetContext) {
this.dataSetContext = dataSetContext;
}
}

View File

@@ -1,34 +0,0 @@
package com.etl.rule.engine.scope;
import com.etl.rule.engine.scope.model.RecordProcessModel;
/**
* Record scope context
* @author YunFei.Du
* @date 8:50 2024/5/15
*/
public class RecordContext implements ScopeContext<RecordProcessModel>{
private static final ThreadLocal<RecordProcessModel> THREAD_LOCAL = new ThreadLocal<>();
private final DataSetContext dataSetContext;
private RecordContext (DataSetContext dataSetContext) {
this.dataSetContext = dataSetContext;
}
@Override
public RecordProcessModel get() {
return THREAD_LOCAL.get();
}
@Override
public void clear() {
THREAD_LOCAL.remove();
}
@Override
public void set(RecordProcessModel recordProcessModel) {
THREAD_LOCAL.set(recordProcessModel);
}
}

View File

@@ -1,19 +0,0 @@
package com.etl.rule.engine.scope;
import com.etl.rule.engine.Engine;
import com.etl.rule.engine.scope.model.RecordProcessModel;
/**
* @Author: YunFei.Du
* @date: 2024/5/6
* @Description:
* @Version: 1.0
*/
public abstract class RecordEngine implements Engine<RecordProcessModel> {
private RecordContext recordContext;
@Override
public RecordProcessModel get() {
return recordContext.get();
}
}

View File

@@ -1,15 +0,0 @@
package com.etl.rule.engine.scope;
/**
* @Author: YunFei.Du
* @date: 2024/5/6
* @Description:
* @Version: 1.0
*/
public interface ScopeContext<V> {
V get();
void clear();
void set(V v);
}
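
A minimal sketch of how these ThreadLocal-backed contexts were meant to be driven. The set/get/clear calls match the removed DataModelContext above; the demo class ScopeContextUsageSketch and the surrounding work are illustrative assumptions, not part of this commit.

package com.etl.rule.engine.scope;

import com.etl.rule.engine.scope.model.DataProcessModel;

// Hypothetical sketch: the removed ScopeContext implementations (DataModelContext,
// DataSetContext, RecordContext) each wrap a ThreadLocal, so the usual pattern is
// set -> work -> clear, with clear() in a finally block to avoid ThreadLocal leaks.
public class ScopeContextUsageSketch {
    static void process(DataModelContext context, DataProcessModel model) {
        // Bind the model to the current thread.
        context.set(model);
        try {
            DataProcessModel current = context.get();
            // ... rule-engine work that reads the thread-bound model, e.g. current.getDataModel() ...
        } finally {
            // Always remove the ThreadLocal entry.
            context.clear();
        }
    }
}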

View File

@@ -1,14 +0,0 @@
package com.etl.rule.engine.scope;
/**
* @Author: YunFei.Du
* @date: 2024/4/29
* @Description:
* @Version: 1.0
*/
public class TaskContext {
public static TaskContext build(){
return new TaskContext();
}
}

View File

@@ -1,16 +0,0 @@
package com.etl.rule.engine.scope.model;
import com.etl.rule.engine.model.DataModel;
import lombok.Data;
/**
* @Author: YunFei.Du
* @date: 2024/5/5
* @Description:
* @Version: 1.0
*/
@Data
public class DataProcessModel {
private DataModel dataModel;
}

View File

@@ -1,16 +0,0 @@
package com.etl.rule.engine.scope.model;
import com.etl.rule.engine.model.DataSetModel;
import lombok.Data;
/**
* @Author: YunFei.Du
* @date: 2024/5/5
* @Description:
* @Version: 1.0
*/
@Data
public class DataSetProcessModel {
private DataSetModel dataSetModel;
}

View File

@@ -1,18 +0,0 @@
package com.etl.rule.engine.scope.model;
import com.etl.rule.engine.model.RecordModel;
import lombok.Data;
/**
* @Author: YunFei.Du
* @date: 2024/5/5
* @Description:
* @Version: 1.0
*/
@Data
public class RecordProcessModel {
private String[] keys;
private RecordModel recordModel;
}

View File

@@ -1,11 +0,0 @@
package com.etl.rule.engine.scope;
/**
* @ClassName DataModelEngine
* @Description
* @Author YunFei.Du
* @Date 2024/5/14 18:36
*/
public class DataModelEngine {
}