Compare commits

...

6 Commits

Author SHA1 Message Date
刘武 7f00c32dc7 Added data processing module 2024-09-28 12:28:18 +08:00
刘武 702817e275 Added data processing module 2024-09-28 11:48:28 +08:00
刘武 dc19916bca Added data processing module 2024-09-27 22:30:09 +08:00
刘武 b1d3d51c7f Added data processing module 2024-09-26 22:32:43 +08:00
刘武 b25c3bb686 Added data processing module 2024-09-26 22:03:38 +08:00
刘武 3ce339dbf9 Added data processing module 2024-09-26 21:23:38 +08:00
37 changed files with 857 additions and 183 deletions

.gitignore vendored
View File

@ -19,7 +19,6 @@ out
.project
.settings
.springBeans
*.yml
### IntelliJ IDEA ###
.idea

View File

@ -29,8 +29,10 @@
<groupId>com.muyu</groupId>
<artifactId>cloud-common-security</artifactId>
</dependency>
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
</dependency>
</dependencies>

View File

@ -6,17 +6,20 @@ import com.baomidou.mybatisplus.autoconfigure.MybatisPlusAutoConfiguration;
import com.muyu.cloud.common.many.datasource.constents.DatasourceContent;
import com.muyu.cloud.common.many.datasource.domain.model.DataSourceInfo;
import com.muyu.cloud.common.many.datasource.factory.DruidDataSourceFactory;
import com.muyu.cloud.common.many.datasource.init.InitDataSource;
import com.muyu.cloud.common.many.datasource.role.DynamicDataSource;
import com.muyu.cloud.common.saas.domain.Datasource;
import com.muyu.cloud.common.saas.domain.model.EntInfo;
import com.muyu.cloud.common.saas.exception.SaaSException;
import com.muyu.common.core.domain.Result;
import com.muyu.common.core.utils.SpringUtils;
import com.muyu.common.system.domain.Datasource;
import com.muyu.common.system.domain.SysUser;
import com.muyu.common.system.remote.RemoteSaaSService;
import com.muyu.common.system.remote.RemoteUserService;
import lombok.extern.log4j.Log4j2;
import org.mybatis.spring.boot.autoconfigure.MybatisAutoConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.autoconfigure.AutoConfiguration;
@ -38,9 +41,14 @@ import java.util.Map;
@Component
@AutoConfiguration(before = {MybatisPlusAutoConfiguration.class, MybatisAutoConfiguration.class})
public class ManyDataSource implements ApplicationRunner{
@Autowired
private InitDataSource initDataSource;
private List<EntInfo> dataSourceInfoList(){
RemoteSaaSService remoteSaaSService = SpringUtils.getBean(RemoteSaaSService.class);
Result<List<Datasource>> tableDataInfoResult = remoteSaaSService.findDatabaseList();
Result<List<Datasource>> tableDataInfoResult = initDataSource.initDatasource();
if (tableDataInfoResult==null){
throw new SaaSException("SaaS remote data source call failed");
}

View File

@ -0,0 +1,59 @@
package com.muyu.cloud.common.many.datasource.init;
import com.muyu.cloud.common.saas.domain.Datasource;
import com.muyu.common.core.domain.Result;
import com.muyu.common.system.domain.Firm;
import lombok.extern.log4j.Log4j2;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
@Component
@Log4j2
public class InitDataSource {
public static final String USER="root";
public static final String PASSWORD="Lw030106";
@Bean
@Primary
public Result<List<Datasource>> initDatasource(){
ArrayList<Datasource> list = new ArrayList<>();
try {
DriverManager.registerDriver(new com.mysql.cj.jdbc.Driver());
Connection connection= DriverManager.getConnection("jdbc:mysql://47.101.53.251:3306/datasource?useUnicode=true&characterEncoding=utf8&serverTimezone=GMT&useSSL=false",USER,PASSWORD);
String sql="select * from `datasource` ";
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(sql);
while (rs.next()){
Datasource datasource = new Datasource();
datasource.setId(rs.getInt("id"));
datasource.setFirmName(rs.getString("firm_name"));
datasource.setDatabaseName(rs.getString("database_name"));
list.add(datasource);
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
return Result.success(list);
};
}
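
The lookup above works, but it leaves the JDBC Connection, Statement, and ResultSet open and hardcodes the credentials. For reference, a minimal sketch of the same query using try-with-resources; the class name InitDataSourceSketch and the idea of passing the URL and credentials in from configuration are illustrative assumptions, not part of this changeset.

package com.muyu.cloud.common.many.datasource.init;

import com.muyu.cloud.common.saas.domain.Datasource;
import com.muyu.common.core.domain.Result;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

public class InitDataSourceSketch {

    // Hypothetical: URL and credentials would normally come from configuration, not constants.
    private final String url;
    private final String user;
    private final String password;

    public InitDataSourceSketch(String url, String user, String password) {
        this.url = url;
        this.user = user;
        this.password = password;
    }

    public Result<List<Datasource>> loadDatasourceList() {
        List<Datasource> list = new ArrayList<>();
        String sql = "select id, firm_name, database_name from `datasource`";
        // try-with-resources closes the connection, statement and result set even on error
        try (Connection connection = DriverManager.getConnection(url, user, password);
             Statement stmt = connection.createStatement();
             ResultSet rs = stmt.executeQuery(sql)) {
            while (rs.next()) {
                Datasource datasource = new Datasource();
                datasource.setId(rs.getInt("id"));
                datasource.setFirmName(rs.getString("firm_name"));
                datasource.setDatabaseName(rs.getString("database_name"));
                list.add(datasource);
            }
        } catch (SQLException e) {
            throw new RuntimeException("Failed to load datasource list", e);
        }
        return Result.success(list);
    }
}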

View File

@ -1,24 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.muyu</groupId>
<artifactId>cloud-modules</artifactId>
<version>3.6.3</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>cloud-data</artifactId>
<artifactId>cloud-saas</artifactId>
<description>
cloud-saas
</description>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-bootstrap</artifactId>
@ -50,6 +50,13 @@
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<!-- Mysql Connector -->
<dependency>
<groupId>com.mysql</groupId>
@ -79,9 +86,35 @@
<groupId>com.muyu</groupId>
<artifactId>cloud-common-api-doc</artifactId>
</dependency>
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-modules-car</artifactId>
<version>3.6.3</version>
</dependency>
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-modules-fence</artifactId>
<version>3.6.3</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.eclipse.paho</groupId>
<artifactId>org.eclipse.paho.client.mqttv3</artifactId>
<version>1.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.iotdb</groupId>
<artifactId>iotdb-session</artifactId>
<version>1.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.iotdb</groupId>
<artifactId>node-commons</artifactId>
<version>1.3.2</version>
</dependency>
</dependencies>

View File

@ -0,0 +1,15 @@
package com.muyu.data;
import com.muyu.common.security.annotation.EnableMyFeignClients;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
@SpringBootApplication
@EnableMyFeignClients
@EnableFeignClients
public class DataApplication {
public static void main(String[] args) {
SpringApplication.run(DataApplication.class,args);
}
}

View File

@ -0,0 +1,129 @@
package com.muyu.data.config;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import java.util.HashMap;
import java.util.Map;
/**
* @author
* @date 2022/10/31 18:05
* Kafka consumer configuration; the property values are read from the yml configuration.
*/
@SpringBootConfiguration
public class KafkaConsumerConfig {
/**
* Kafka bootstrap servers
*/
@Value("${spring.kafka.consumer.bootstrap-servers}")
private String bootstrapServers;
/**
* Consumer group id
*/
@Value("${spring.kafka.consumer.group-id}")
private String groupId;
/**
* Whether offsets are committed automatically
*/
@Value("${spring.kafka.consumer.enable-auto-commit}")
private boolean enableAutoCommit;
/**
* Kafka session timeout
*/
@Value("${spring.kafka.properties.session.timeout.ms}")
private String sessionTimeout;
/**
* Maximum interval between two polls (default 5 minutes); exceeding it triggers a rebalance
*/
@Value("${spring.kafka.properties.max.poll.interval.ms}")
private String maxPollIntervalTime;
@Value("${spring.kafka.consumer.max-poll-records}")
private String maxPollRecords;
@Value("${spring.kafka.consumer.auto-offset-reset}")
private String autoOffsetReset;
@Value("${spring.kafka.listener.concurrency}")
private Integer concurrency;
@Value("${spring.kafka.listener.missing-topics-fatal}")
private boolean missingTopicsFatal;
@Value("${spring.kafka.listener.poll-timeout}")
private long pollTimeout;
@Bean
public Map<String, Object> consumerConfigs() {
Map<String, Object> propsMap = new HashMap<>(16);
propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
//Whether to auto-commit offsets. The default is true; to avoid duplicate or lost records it can be set to false and offsets committed manually.
propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
//Auto-commit interval; only effective when auto-commit is enabled.
propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "2000");
//What the consumer does when reading a partition with no committed offset, or when the offset is invalid:
//earliest: resume from the committed offset if one exists; otherwise consume the partition from the beginning.
//latest: resume from the committed offset if one exists; otherwise consume only records produced after the consumer started.
//none: resume from committed offsets when every partition has one; throw an exception if any partition has no committed offset.
propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
//Maximum interval between two polls; the default is 5 minutes. Exceeding it triggers a rebalance.
propsMap.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, maxPollIntervalTime);
//Maximum number of records a single poll may return; the default is 500. If fewer new records are available, poll returns what there is; it never returns more than the limit.
//The default can be too large: some workloads cannot process 500 records within the 5-minute poll interval.
//If the consumer cannot finish 500 records within 5 minutes a rebalance is triggered,
//the batch is reassigned to another consumer, which also cannot finish it, so the batch is never fully processed.
//To avoid this, estimate the worst-case time to process one record and override the max.poll.records parameter accordingly.
//Note: this matters mainly when BatchListener is enabled; without batch listening the rebalance problem described above does not occur.
propsMap.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
//How long the broker waits without receiving a consumer heartbeat before triggering a rebalance; the default is 10s.
propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
//JSON (de)serialization is recommended: entity classes can be transferred without extra configuration.
propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
return propsMap;
}
@Bean
public ConsumerFactory<Object, Object> consumerFactory() {
// Configure the trusted packages for the consumer's JSON deserializer; required to deserialize entity classes
try (JsonDeserializer<Object> deserializer = new JsonDeserializer<>()) {
deserializer.trustedPackages("*");
return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new JsonDeserializer<>(), deserializer);
}
}
/**
* Kafka listener container factory used by @KafkaListener consumers.
* @return the configured listener container factory
*/
@Bean
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<Object, Object>> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<Object, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
//Number of threads running in the listener container; usually set to (number of instances * number of partitions)
factory.setConcurrency(concurrency);
// By default the listener fails when a subscribed topic does not exist; set to false to ignore the error
factory.setMissingTopicsFatal(missingTopicsFatal);
// Auto-commit is disabled, so manual message acknowledgment must be configured
factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
factory.getContainerProperties().setPollTimeout(pollTimeout);
// Enable batch listening (the listener then receives a List of records)
// factory.setBatchListener(true);
return factory;
}
}
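
Because auto-commit is disabled and the container ack mode is MANUAL_IMMEDIATE, any listener built on this factory has to acknowledge records itself. A minimal sketch of such a listener, assuming a hypothetical topic name "vehicle-data"; the error handler bean name refers to the MyKafkaListenerErrorHandler added later in this changeset, and the class itself is illustrative, not part of the diff.

package com.muyu.data.consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

@Component
public class VehicleDataListener {

    // "vehicle-data" is a hypothetical topic; the factory and error handler beans come from this changeset
    @KafkaListener(topics = "vehicle-data",
            containerFactory = "kafkaListenerContainerFactory",
            errorHandler = "myKafkaListenerErrorHandler")
    public void onMessage(ConsumerRecord<Object, Object> record, Acknowledgment ack) {
        // Process the record, then commit the offset manually (required by AckMode.MANUAL_IMMEDIATE)
        System.out.println("received: " + record.value());
        ack.acknowledge();
    }
}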

View File

@ -0,0 +1,127 @@
package com.muyu.data.config;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.transaction.KafkaTransactionManager;
import java.util.HashMap;
import java.util.Map;
/**
* Kafka producer configuration.
*/
@Configuration
public class KafkaProviderConfig {
/**
* Kafka bootstrap servers
*/
@Value("${spring.kafka.producer.bootstrap-servers}")
private String bootstrapServers;
/**
* Transaction id prefix for the Kafka producer
*/
@Value("${spring.kafka.producer.transaction-id-prefix}")
private String transactionIdPrefix;
/**
* Producer acks setting
*/
@Value("${spring.kafka.producer.acks}")
private String acks;
/**
* Number of retries after a send failure
*/
@Value("${spring.kafka.producer.retries}")
private String retries;
/**
* Producer batch size
*/
@Value("${spring.kafka.producer.batch-size}")
private String batchSize;
/**
* Producer buffer memory
*/
@Value("${spring.kafka.producer.buffer-memory}")
private String bufferMemory;
/**
* Key serializer class
*/
@Value("${spring.kafka.producer.key-serializer}")
private String keySerializer;
/**
* Value serializer class
*/
@Value("${spring.kafka.producer.value-serializer}")
private String valueSerializer;
/**
* Producer configuration map.
* @return the producer configuration properties
*/
@Bean
public Map<String, Object> producerConfigs() {
Map<String, Object> props = new HashMap<>(16);
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
//acks=0: the producer does not wait for any response from the server before considering a message written.
//acks=1: the producer gets a success response as soon as the partition leader has received the message.
//acks=all: the producer gets a success response only after all in-sync replicas have received the message.
//Transactions require acks=all.
props.put(ProducerConfig.ACKS_CONFIG, acks);
//Number of times a message is resent after an error; must be greater than 0 when transactions are enabled.
props.put(ProducerConfig.RETRIES_CONFIG, retries);
//When several messages go to the same partition the producer batches them to reduce round trips instead of sending them one by one.
//The batch size is controlled by batch.size; the default is 16KB.
//A smaller batch size can reduce throughput; a batch size of 0 disables batching entirely.
//For example, if it takes 5 seconds for a batch to fill up to 16KB, those messages see a 5-second delay.
//In practice this batchSize parameter had little observable effect here.
props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
//Sometimes traffic is light and a batch may not reach 16KB for a long time (say 5 minutes), which adds latency,
//so a time limit is also set: once it elapses the batch is sent even if it has not reached 16KB.
props.put(ProducerConfig.LINGER_MS_CONFIG, "5000");
//Size of the producer's in-memory buffer.
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
//Serializers for keys and values; the consumer side must use matching deserializers.
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySerializer);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer);
return props;
}
/**
* Producer factory with transactions enabled.
* @return the producer factory
*/
@Bean
public ProducerFactory<Object, Object> producerFactory() {
DefaultKafkaProducerFactory<Object, Object> factory = new DefaultKafkaProducerFactory<>(producerConfigs());
//Enabling transactions causes the LINGER_MS_CONFIG setting to be ignored
factory.setTransactionIdPrefix(transactionIdPrefix);
return factory;
}
/**
* Kafka transaction manager bound to the producer factory.
* @param producerFactory the transactional producer factory
* @return the transaction manager
*/
@Bean
public KafkaTransactionManager<Object, Object> kafkaTransactionManager(ProducerFactory<Object, Object> producerFactory) {
return new KafkaTransactionManager<>(producerFactory);
}
/**
* KafkaTemplate used to send messages.
* @return the Kafka template
*/
@Bean
public KafkaTemplate<Object, Object> kafkaTemplate() {
return new KafkaTemplate<>(producerFactory());
}
}
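
Because the producer factory is given a transactionIdPrefix, plain kafkaTemplate.send() calls outside a transaction will be rejected; sends need to run inside executeInTransaction or a Kafka-transactional method. A minimal sketch under that assumption; the VehicleDataProducer class and the "vehicle-data" topic are illustrative, not part of this changeset.

package com.muyu.data.producer;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class VehicleDataProducer {

    @Autowired
    private KafkaTemplate<Object, Object> kafkaTemplate;

    public void send(Object payload) {
        // The producer factory is transactional, so the send is wrapped in a local Kafka transaction
        kafkaTemplate.executeInTransaction(operations -> {
            operations.send("vehicle-data", payload);
            return true;
        });
    }
}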

View File

@ -0,0 +1,65 @@
package com.muyu.data.config;
import jakarta.annotation.Nullable;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.ProducerListener;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
@Component
public class KafkaSendResultHandler implements ProducerListener<Object,Object> {
@Autowired
private KafkaTemplate<Object,Object> kafkaTemplate;
/**
* Register this class as the producer listener once the bean has been constructed.
*/
@PostConstruct
public void init(){
this.kafkaTemplate.setProducerListener(this);
}
/**
* Called when a record has been sent to Kafka successfully.
* @param producerRecord the record that was sent
* @param recordMetadata metadata of the sent record
*/
@Override
public void onSuccess(ProducerRecord producerRecord, RecordMetadata recordMetadata){
System.out.println("Message sent successfully: "+ producerRecord.toString());
}
/**
* Called when sending a record to Kafka fails.
* @param producerRecord the failed record
* @param recordMetadata The metadata for the record that was sent (i.e. the partition
* and offset). If an error occurred, metadata will contain only valid topic and maybe
* the partition. If the partition is not provided in the ProducerRecord and an error
* occurs before partition is assigned, then the partition will be set to
* RecordMetadata.UNKNOWN_PARTITION.
* @param exception the exception thrown
*/
@Override
public void onError(ProducerRecord producerRecord, @Nullable RecordMetadata recordMetadata,
Exception exception){
System.out.println("消息发送失败: "+ producerRecord.toString());
}
}

View File

@ -0,0 +1,36 @@
package com.muyu.data.config;
import lombok.NonNull;
import org.apache.kafka.clients.consumer.Consumer;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.listener.KafkaListenerErrorHandler;
import org.springframework.kafka.listener.ListenerExecutionFailedException;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.messaging.Message;
import org.springframework.stereotype.Component;
@Component
public class MyKafkaListenerErrorHandler implements KafkaListenerErrorHandler {
@Override
@NonNull
public Object handleError(@NonNull Message<?> message,
ListenerExecutionFailedException exception) {
return new Object();
}
@Override
@NonNull
public Object handleError(@NonNull Message<?> message,
@NonNull ListenerExecutionFailedException exception,
Consumer<?, ?> consumer) {
System.out.println("消息详情:"+ message);
System.out.println("异常信息:"+ exception);
System.out.println("消费者详情:" +consumer.groupMetadata());
System.out.println("监听主题:"+ consumer.listTopics());
return KafkaListenerErrorHandler.super.handleError(message, exception, consumer);
}
}

View File

@ -0,0 +1,30 @@
package com.muyu.data.controller;
import com.muyu.data.service.DataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("data")
public class DataController {
@Autowired
private DataService dataService;
}

View File

@ -0,0 +1,42 @@
package com.muyu.data.domian;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.muyu.common.core.annotation.Excel;
import jakarta.validation.constraints.NotBlank;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
@Data
@AllArgsConstructor
@NoArgsConstructor
@EqualsAndHashCode(callSuper = false)
@TableName("event")
public class Event {
@TableId(value ="event_id" )
@Excel(name = "事件id")
private Integer eventId;
@Excel(name = "事件名称")
private String eventName;
@Excel(name = "车辆vin")
private String carVin;
private String createBy;
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private Date createTime;
}

View File

@ -0,0 +1,8 @@
package com.muyu.data.mapper;
import org.apache.ibatis.annotations.Mapper;
@Mapper
public interface DataMapper {
}

View File

@ -0,0 +1,23 @@
package com.muyu.data.remote;
import com.muyu.common.core.domain.Result;
import com.muyu.domain.req.SysCarReq;
import com.muyu.domain.resp.SysCarVo;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import java.util.List;
@FeignClient(name = "cloud-car")
public interface RemoteCarService {
@PostMapping("/car/selectSysCarVoList")
public Result<List<SysCarVo>> selectSysCarVoList(
@RequestBody SysCarReq sysCarReq);
}

View File

@ -0,0 +1,21 @@
package com.muyu.data.remote;
import com.muyu.common.core.domain.Result;
import com.muyu.fence.domain.req.ElectronicFenceGroupListReq;
import com.muyu.fence.domain.resp.GroupFenceListresp;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import java.util.List;
@FeignClient(name = "cloud-fence")
public interface RemoteFenceService {
@PostMapping("/fence/selectGroupList")
public Result<List<GroupFenceListresp>> selectGroupList(
@RequestBody ElectronicFenceGroupListReq req);
}

View File

@ -0,0 +1,4 @@
package com.muyu.data.service;
public interface DataService {
}

View File

@ -0,0 +1,9 @@
package com.muyu.data.service.impl;
import org.springframework.stereotype.Service;
@Service
public class DataServiceImpl {
}

View File

@ -0,0 +1,57 @@
package com.muyu.data.util;
import com.muyu.common.core.domain.Result;
import com.muyu.data.remote.RemoteCarService;
import com.muyu.data.remote.RemoteFenceService;
import com.muyu.domain.resp.SysCarVo;
import com.muyu.fence.domain.resp.GroupFenceListresp;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.List;
@Component
public class InitDataRedis implements ApplicationRunner {
@Resource
private RedisTemplate redisTemplate;
@Autowired
private RemoteCarService remoteCarService;
@Autowired
private RemoteFenceService remoteFenceService;
@Override
public void run(ApplicationArguments args) throws Exception {
//Query the basic vehicle list from the database
Result<List<SysCarVo>> sysCarVoListResult = remoteCarService.selectSysCarVoList(null);
List<SysCarVo> sysCarVoList = sysCarVoListResult.getData();
//Cache every vehicle record in Redis
for (SysCarVo sysCarVo : sysCarVoList) {
String carVin = sysCarVo.getCarVin();
redisTemplate.opsForValue().set("four:car:"+carVin,sysCarVo);
}
//Query the vehicle fence group list
Result<List<GroupFenceListresp>> groupListResult = remoteFenceService.selectGroupList(null);
List<GroupFenceListresp> groupFenceList = groupListResult.getData();
//Cache every fence group record in Redis
for (GroupFenceListresp groupFenceListresp : groupFenceList) {
Long id = groupFenceListresp.getId();
redisTemplate.opsForValue().set("four:fenceGroup:"+id,groupFenceListresp);
}
}
}
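
The cached entries can be read back with the same key patterns; a small sketch, assuming the same RedisTemplate serialization that InitDataRedis used when writing. The CachedDataReader class is illustrative, not part of this changeset.

package com.muyu.data.util;

import com.muyu.domain.resp.SysCarVo;
import com.muyu.fence.domain.resp.GroupFenceListresp;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;

@Component
public class CachedDataReader {

    @Resource
    private RedisTemplate redisTemplate;

    // Key patterns match the ones written by InitDataRedis above
    public SysCarVo getCar(String carVin) {
        return (SysCarVo) redisTemplate.opsForValue().get("four:car:" + carVin);
    }

    public GroupFenceListresp getFenceGroup(Long id) {
        return (GroupFenceListresp) redisTemplate.opsForValue().get("four:fenceGroup:" + id);
    }
}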

View File

@ -0,0 +1,29 @@
package com.muyu.data.util;
import org.apache.iotdb.isession.util.Version;
import org.apache.iotdb.rpc.IoTDBConnectionException;
import org.apache.iotdb.session.Session;
public class Iotdb {
public static void main(String[] args) throws IoTDBConnectionException {
System.out.println("测试数据库开始~~~~~");
// 初始化与连接
Session session = new Session.Builder()
.host("47.101.53.251")
.port(6667)
.username("root")
.password("root")
.version(Version.V_1_0)
.build();
// Open the session without RPC compression
session.open(false);
}
}
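
The class above only opens a session. A minimal sketch of writing one record and closing the session, assuming the standard iotdb-session 1.x API; the device path root.vehicle.d1 and the measurement names are made up for illustration and are not part of this changeset.

package com.muyu.data.util;

import org.apache.iotdb.isession.util.Version;
import org.apache.iotdb.rpc.IoTDBConnectionException;
import org.apache.iotdb.rpc.StatementExecutionException;
import org.apache.iotdb.session.Session;

import java.util.Arrays;

public class IotdbWriteSketch {
    public static void main(String[] args) throws IoTDBConnectionException, StatementExecutionException {
        Session session = new Session.Builder()
                .host("47.101.53.251")
                .port(6667)
                .username("root")
                .password("root")
                .version(Version.V_1_0)
                .build();
        session.open(false);
        // Write one record: device path, timestamp, measurement names, string values (types are inferred)
        session.insertRecord("root.vehicle.d1",
                System.currentTimeMillis(),
                Arrays.asList("speed", "soc"),
                Arrays.asList("60.5", "88"));
        // Close the session to release the connection
        session.close();
    }
}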

View File

@ -0,0 +1,53 @@
package com.muyu.data.util;
import org.eclipse.paho.client.mqttv3.*;
public class Receive {
public static void main(String[] args) {
String topic = "vehicle";
String broker = "tcp://47.101.53.251:1883";
String clientId="lw";
try {
MqttClient mqttClient= new MqttClient(broker,clientId);
MqttConnectOptions connectOptions=new MqttConnectOptions();
connectOptions.setCleanSession(true);
System.out.println("Connecting to broker" + broker);
mqttClient.connect(connectOptions);
System.out.println("已连接");
mqttClient.setCallback(new MqttCallback(){
@Override
public void connectionLost(Throwable throwable) {
System.out.println("Connect lost!");
}
@Override
public void messageArrived(String s, MqttMessage mqttMessage) throws Exception {
System.out.println("Message arrived. topic:"+topic);
}
@Override
public void deliveryComplete(IMqttDeliveryToken iMqttDeliveryToken) {
}
});
mqttClient.subscribe(topic);
System.out.println("Subscribed to topic " + topic);
} catch (MqttException e) {
System.out.println("reason "+e.getReasonCode());
System.out.println("msg " +e.getMessage());
System.out.println("loc " +e.getLocalizedMessage());
System.out.println("cause "+e.getCause());
System.out.println("excep "+e);
e.printStackTrace();
}
}
}
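
For completeness, a minimal publisher counterpart to the subscriber above, using the same Paho v3 client against the same broker and topic; the client id "lw-pub" and the payload are illustrative assumptions, not part of this changeset.

package com.muyu.data.util;

import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;

public class Send {
    public static void main(String[] args) {
        String topic = "vehicle";
        String broker = "tcp://47.101.53.251:1883";
        String clientId = "lw-pub"; // hypothetical client id; must differ from the subscriber's
        try {
            MqttClient mqttClient = new MqttClient(broker, clientId, new MemoryPersistence());
            MqttConnectOptions connectOptions = new MqttConnectOptions();
            connectOptions.setCleanSession(true);
            mqttClient.connect(connectOptions);
            // Publish a single message with QoS 1, then disconnect
            MqttMessage message = new MqttMessage("hello from cloud-data".getBytes());
            message.setQos(1);
            mqttClient.publish(topic, message);
            mqttClient.disconnect();
        } catch (MqttException e) {
            e.printStackTrace();
        }
    }
}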

View File

@ -0,0 +1,59 @@
# Tomcat
server:
port: 10003
# Nacos server address (remote)
nacos:
addr: 47.101.53.251:8848
user-name: nacos
password: nacos
namespace: four
# SPRING_AMQP_DESERIALIZATION_TRUST_ALL=true spring.amqp.deserialization.trust.all
# Spring
spring:
amqp:
deserialization:
trust:
all: true
main:
allow-bean-definition-overriding: true
application:
# Application name
name: cloud-data
profiles:
# Active profile
active: dev
cloud:
nacos:
discovery:
# Service registry address
server-addr: ${nacos.addr}
# Nacos username
username: ${nacos.user-name}
# Nacos password
password: ${nacos.password}
# Namespace
namespace: ${nacos.namespace}
config:
# Configuration server address
server-addr: ${nacos.addr}
# Nacos username
username: ${nacos.user-name}
# Nacos password
password: ${nacos.password}
# Namespace
namespace: ${nacos.namespace}
# Configuration file format
file-extension: yml
# Shared configurations
shared-configs:
# System shared configuration
- application-${spring.profiles.active}.${spring.cloud.nacos.config.file-extension}
# System environment config shared configuration
- application-config-${spring.profiles.active}.${spring.cloud.nacos.config.file-extension}
# xxl-job configuration
- application-xxl-config-${spring.profiles.active}.${spring.cloud.nacos.config.file-extension}
# RabbitMQ configuration
- application-rabbit-config-${spring.profiles.active}.${spring.cloud.nacos.config.file-extension}
logging:
level:
com.muyu.fence.mapper: DEBUG

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/cloud-saas"/>
<property name="log.path" value="logs/cloud-data"/>
<!-- 日志输出格式 -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/cloud-saas"/>
<property name="log.path" value="logs/cloud-data"/>
<!-- 日志输出格式 -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<property name="log.sky.pattern" value="%d{HH:mm:ss.SSS} %yellow([%tid]) [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
@ -9,7 +9,7 @@
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.sky.pattern}</pattern>
<pattern>${log.pattern}</pattern>
</encoder>
</appender>
@ -23,7 +23,9 @@
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
@ -43,7 +45,9 @@
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>ERROR</level>
@ -63,13 +67,16 @@
</encoder>
</appender>
<root level="info">
<appender-ref ref="GRPC_LOG"/>
</root>
<!-- 系统模块日志级别控制 -->
<logger name="com.muyu" level="info"/>
<!-- Spring日志级别控制 -->
<logger name="org.springframework" level="warn"/>
<root level="info">
<appender-ref ref="GRPC_LOG"/>
<appender-ref ref="console"/>
</root>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/cloud-saas"/>
<property name="log.path" value="logs/cloud-data"/>
<!-- 日志输出格式 -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<property name="log.sky.pattern" value="%d{HH:mm:ss.SSS} %yellow([%tid]) [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
@ -9,7 +9,7 @@
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.sky.pattern}</pattern>
<pattern>${log.pattern}</pattern>
</encoder>
</appender>
@ -23,7 +23,9 @@
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
@ -43,7 +45,9 @@
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>ERROR</level>
@ -63,13 +67,16 @@
</encoder>
</appender>
<root level="info">
<appender-ref ref="GRPC_LOG"/>
</root>
<!-- 系统模块日志级别控制 -->
<logger name="com.muyu" level="info"/>
<!-- Spring日志级别控制 -->
<logger name="org.springframework" level="warn"/>
<root level="info">
<appender-ref ref="GRPC_LOG"/>
<appender-ref ref="console"/>
</root>

View File

@ -22,6 +22,7 @@ import org.springframework.cloud.openfeign.EnableFeignClients;
DruidDataSourceAutoConfigure.class,
DynamicDataSourceAutoConfiguration.class
})
public class CloudElectronicFenceApplication {
public static void main (String[] args) {
SpringApplication.run(CloudElectronicFenceApplication.class, args);

View File

@ -20,6 +20,8 @@ import java.util.Date;
@NoArgsConstructor
public class ElectroicFenceListReq {
/**
*
*/

View File

@ -18,7 +18,7 @@ import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
@EnableMyFeignClients
@SpringBootApplication
public class CloudSystemApplication {
public static void main (String[] args) {
SpringApplication.run(CloudSystemApplication.class, args);
public static void main(String[] args) {
SpringApplication.run(CloudSystemApplication.class, args);
}
}

View File

@ -96,7 +96,7 @@ public class SysUserServiceImpl extends ServiceImpl<SysUserMapper, SysUser> impl
/**
*
*
* @param userName
* @param
*
* @return
*/
@ -105,7 +105,6 @@ public class SysUserServiceImpl extends ServiceImpl<SysUserMapper, SysUser> impl
String databaseName = firm.getDatabaseName();
String userName = firm.getUserName();
String user="root";
String password="Lw030106";
SysUser sysUser=new SysUser();

View File

@ -1,19 +0,0 @@
package com.muyu.saas;
import com.alibaba.druid.spring.boot3.autoconfigure.DruidDataSourceAutoConfigure;
import com.muyu.common.security.annotation.EnableMyFeignClients;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
@SpringBootApplication
@EnableMyFeignClients
@MapperScan(value = "com.muyu.saas.mapper")
public class SaasApplication {
public static void main(String[] args) {
SpringApplication.run(SaasApplication.class,args);
}
}

View File

@ -1,64 +0,0 @@
package com.muyu.saas.controller;
import com.muyu.common.core.domain.Result;
import com.muyu.saas.domain.Datasource;
import com.muyu.saas.service.SaasService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@RequestMapping("saas")
public class SaaSController {
@Autowired
private SaasService saasService;
/**
* Query the list of data sources.
* @return the data source list
*/
@GetMapping("findDatabaseList")
public Result<List<Datasource>> findDatabaseList(){
List<Datasource> list=saasService.list();
return Result.success(list);
};
/**
* Add a data source.
* @param datasource the data source to add
* @return operation result
*/
@PostMapping("addDatasource")
public Result addDatasource(@RequestBody Datasource datasource){
saasService.save(datasource);
return Result.success("Added successfully");
};
@PostMapping("updDatasource")
public Result updDatasource(@RequestBody Datasource datasource){
saasService.updateById(datasource);
return Result.success("修改成功");
}
/**
*
* @param id
* @return
*/
@PostMapping("delDatasource/{id}")
public Result delDatasource(@PathVariable("id") Integer id){
saasService.removeById(id);
return Result.success("删除成功");
}
}

View File

@ -1,26 +0,0 @@
package com.muyu.saas.domain;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@TableName(value = "datasource", autoResultMap = true)
public class Datasource {
@TableId(value = "id",type = IdType.AUTO)
private Integer id;
public String firmName;
public String databaseName;
}

View File

@ -1,8 +0,0 @@
package com.muyu.saas.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.muyu.saas.domain.Datasource;
public interface SaaSMapper extends BaseMapper<Datasource> {
}

View File

@ -1,8 +0,0 @@
package com.muyu.saas.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.muyu.saas.domain.Datasource;
public interface SaasService extends IService<Datasource> {
}

View File

@ -1,18 +0,0 @@
package com.muyu.saas.service.impl;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.muyu.saas.domain.Datasource;
import com.muyu.saas.mapper.SaaSMapper;
import com.muyu.saas.service.SaasService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class SaasServiceImpl extends ServiceImpl<SaaSMapper,Datasource> implements SaasService {
@Autowired
private SaaSMapper saaSMapper;
}

View File

@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.muyu.saas.mapper.SaaSMapper">
</mapper>

View File

@ -18,7 +18,8 @@
<module>cloud-modules-wechat</module>
<module>cloud-modules-breakdown</module>
<module>cloud-modules-warn</module>
<module>cloud-saas</module>
<module>cloud-data</module>
</modules>
<artifactId>cloud-modules</artifactId>