Compare commits

...

19 Commits

Author SHA1 Message Date
Aaaaaaaa 6c2daa0d7e Merge branch 'master' into xxy
# Conflicts:
#	cloud-modules/cloud-modules-protocolparsing/src/main/java/com/muyu/mqtt/Demo.java
2024-10-02 10:23:47 +08:00
Aaaaaaaa 5a64054615 Merge branch 'master' of https://gitea.qinmian.online/a_bazu/cloud-server-8 2024-10-02 10:22:59 +08:00
张腾 903fecc2cc 121 2024-10-02 10:22:41 +08:00
张腾 e46b6325c4 Integrate the Kafka dependency and update version management
- Add the cloud-common-core dependency to the cloud-common-kafka module
- Remove the Kafka dependency version declaration from cloud-modules-carData
- Manage the Kafka dependency version centrally in the root pom.xml
- Update the cloud-common-kafka dependency version to 3.6.3
2024-10-02 10:10:10 +08:00
Aaaaaaaa 217ffd81ab Merge branch 'master' into xxy 2024-10-02 10:00:30 +08:00
Aaaaaaaa b2c16300f4 Merge remote-tracking branch 'origin/master'
# Conflicts:
#	cloud-common/cloud-common-kafka/pom.xml
#	cloud-common/cloud-common-kafka/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
2024-10-02 09:58:47 +08:00
Aaaaaaaa b07b7a1d2d Add Kafka message queue configuration and constant classes
- Add the KafkaConstants class defining the Kafka topic and consumer group constants
- Add the KafkaConsumerConfig class configuring the Kafka consumer
- Add the KafkaProviderConfig class configuring the Kafka producer
- Add the AutoConfiguration.imports file importing the Kafka configuration classes
- Create the pom.xml for the cloud-common-kafka module and add its dependencies
2024-10-02 09:58:09 +08:00
张腾 614ddceebc Remove the IoTDB-related entity class, interface, and annotation. This code may have been moved elsewhere or these structures are no longer needed. Affected code includes:
- Removed the `com.muyu.carData.domain.IoTDBRecord` entity class, which defined an IoT database record.
- Removed the `com.muyu.carData.interfaces.IoTDBRecordable` interface, which defined how to convert an object into an IoTDBRecord.
- Removed the `com.muyu.carData.annotation.IoTTableName` annotation, which marked the IoT database table name for a class.
- Removed the `com.muyu.carData.constract.IoTDBTableParam` class, which contained constant definitions for IoT database table parameters.
2024-10-02 09:43:00 +08:00
张腾 a17d1b4557 feat(): fix merged code 2024-10-02 09:38:43 +08:00
Aaaaaaaa 057ad3ded5 Merge branch 'master' into xxy 2024-10-02 09:31:55 +08:00
张腾 1542f34db3 Merge remote-tracking branch 'origin/dev.carData' into dev
# Conflicts:
#	cloud-common/pom.xml
2024-10-02 09:28:58 +08:00
张腾 87b3eccff5 feat(): update to the latest code 2024-10-02 09:12:44 +08:00
Aaaaaaaa 735e7f56e0 Protocol parsing: completed the parsing method, implemented producing to Kafka, and moved it into its own module 2024-09-30 20:58:08 +08:00
张腾 af798f18e5 feat(): edit Kafka code 2024-09-30 16:52:26 +08:00
Aaaaaaaa 0d82041676 Merge branch 'xxy' into dev 2024-09-30 10:12:22 +08:00
Aaaaaaaa 7a88f1da14 Fix errors 2024-09-30 10:11:40 +08:00
niuwu666 b4f832925c Merge pull request 'dev.carData' (#4) from dev.carData into dev
Reviewed-on: #4
2024-09-30 09:04:26 +08:00
张腾 5ab005839c commit()yml 2024-09-30 09:03:40 +08:00
张腾 6f52789cc0 commit()yml 2024-09-30 09:03:17 +08:00
67 changed files with 2043 additions and 174 deletions

View File

@ -7,7 +7,7 @@ nacos:
addr: 159.75.188.178:8848
user-name: nacos
password: nacos
namespace: eight
namespace: xxy
# Spring
spring:
application:

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.muyu</groupId>
<artifactId>cloud-common</artifactId>
<version>3.6.3</version>
</parent>
<artifactId>cloud-common-kafka</artifactId>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>3.0.0</version>
</dependency>
<!-- Project common core -->
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-core</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,54 @@
package com.muyu.common.kafka.config;
import com.muyu.common.kafka.constants.KafkaConstants;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.HashMap;
import java.util.Map;
/**
* Kafka consumer configuration
*/
@Configuration
public class KafkaConsumerConfig {
@Bean
public KafkaConsumer kafkaConsumer() {
Map<String, Object> configs = new HashMap<>();
//Kafka broker address, format: (ip:port)
configs.put("bootstrap.servers", "60.204.221.52:9092");
//Enable automatic commit of the consumer offset back to Kafka
configs.put("enable.auto.commit", true);
//Interval, in milliseconds, at which the consumer offset is auto-committed
configs.put("auto.commit.interval", 5000);
//Behaviour when Kafka has no initial offset or the current offset no longer exists:
//earliest - automatically reset to the earliest offset
//latest - automatically reset to the latest offset
//none - throw an exception
configs.put("auto.offset.reset", "latest");
//Maximum time (ms) a fetch request may block
configs.put("fetch.max.wait", 500);
//Minimum number of bytes in a fetch response
configs.put("fetch.min.size", 1);
//Heartbeat interval (ms)
configs.put("heartbeat-interval", 3000);
//Maximum number of records returned by a single poll call
configs.put("max.poll.records", 500);
//Consumer group
configs.put("group.id", KafkaConstants.KafkaGrop);
//Deserializer class for keys
Deserializer keyDeserializer = new StringDeserializer();
//Deserializer class for values
Deserializer valueDeserializer = new StringDeserializer();
//Create the Kafka consumer
KafkaConsumer kafkaConsumer = new KafkaConsumer(configs, keyDeserializer, valueDeserializer);
return kafkaConsumer;
}
}
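Reviewer note (not part of this changeset): several keys above are not the canonical Kafka client property names — the client expects auto.commit.interval.ms, fetch.max.wait.ms, fetch.min.bytes and heartbeat.interval.ms — and unrecognised keys are simply ignored, so those settings silently fall back to their defaults. A minimal, hypothetical sketch of how the bean might be used, assuming it is injected as a Spring bean:

import java.time.Duration;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import com.muyu.common.kafka.constants.KafkaConstants;

public class ConsumerUsageSketch {
    // Hypothetical helper: subscribes to the shared topic and prints each consumed value.
    public void consume(KafkaConsumer<String, String> kafkaConsumer) {
        kafkaConsumer.subscribe(List.of(KafkaConstants.KafkaTopic));
        while (true) {
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(500));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }
}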

View File

@ -0,0 +1,45 @@
package com.muyu.common.kafka.config;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.HashMap;
import java.util.Map;
/**
* Kafka producer configuration
*/
@Configuration
public class KafkaProviderConfig {
@Bean
public KafkaProducer kafkaProducer() {
Map<String, Object> configs = new HashMap<>();
//Kafka broker address, format: (ip:port)
configs.put("bootstrap.servers", "47.116.173.119:9092");
//Number of retries when a send to the broker fails
configs.put("retries", 2);
//When multiple records are sent to the same partition, the producer batches them into fewer requests.
//This improves client and broker performance; the setting controls the default batch size in bytes
configs.put("batch.size", 16384);
//Total memory, in bytes, the producer may use to buffer records waiting to be sent to the broker
configs.put("buffer-memory", 33554432);
//Number of acknowledgements the producer requires the leader to receive before considering a request complete; controls durability of sent records
//acks=0: the producer does not wait for any acknowledgement from the broker; the record goes straight to the socket buffer and is treated as sent, so there is no delivery guarantee, retries do not apply, and the returned offset is always -1.
//acks=1: the leader writes the record to its local log and replies without waiting for all followers; if the leader fails right after acknowledging but before the followers replicate, the record is lost.
//acks=all / acks=-1: the leader waits for the full set of in-sync replicas to acknowledge; the record is not lost as long as at least one in-sync replica remains alive.
configs.put("acks", "-1");
//Serializer class for keys
Serializer keySerializer = new StringSerializer();
//Serializer class for values
Serializer valueSerializer = new StringSerializer();
//Create the Kafka producer
KafkaProducer kafkaProducer = new KafkaProducer(configs, keySerializer, valueSerializer);
return kafkaProducer;
}
}
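Reviewer note (not part of this changeset): "buffer-memory" is not the canonical "buffer.memory" key, and the producer points at a different broker (47.116.173.119) than the consumer bean (60.204.221.52), which may be intentional but is worth confirming. A minimal, hypothetical sketch of sending through this bean, assuming it is injected where needed:

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import com.muyu.common.kafka.constants.KafkaConstants;

public class ProducerUsageSketch {
    // Hypothetical helper: sends one payload to the shared topic and surfaces send failures.
    public void send(KafkaProducer<String, String> kafkaProducer, String payload) {
        ProducerRecord<String, String> record = new ProducerRecord<>(KafkaConstants.KafkaTopic, payload);
        kafkaProducer.send(record, (metadata, exception) -> {
            if (exception != null) {
                exception.printStackTrace();
            }
        });
        kafkaProducer.flush();
    }
}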

View File

@ -0,0 +1,9 @@
package com.muyu.common.kafka.constants;
public class KafkaConstants {
public final static String KafkaTopic = "carJsons";
// public final static String KafkaGrop = "kafka_grop";
}
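Reviewer note (not part of this changeset): KafkaConsumerConfig reads KafkaConstants.KafkaGrop for group.id, but that constant is commented out here, so the module would not compile as shown. A one-line sketch of the restored constant, reusing the value from the commented line:

public final static String KafkaGrop = "kafka_grop";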

View File

@ -1,4 +1,4 @@
package com.muyu.carData.config.kafkaconfig;
package com.muyu.kafkaconfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
@ -28,7 +28,7 @@ public class KafkaConfig {
//Total memory, in bytes, the producer may use to buffer records waiting to be sent
configs.put("buffer-memory",3554432);
/**
 * Number of acknowledgements the producer requires the leader to receive before considering a request complete,
 * which controls how durably a sent record is persisted on the broker:
 * acks=0: the producer does not wait for any acknowledgement; the record goes to the socket buffer and is treated as sent, so there is no delivery guarantee, retries do not apply, and the returned offset is always -1.
 * acks=1: the leader writes the record to its local log and acknowledges before the followers replicate it,

View File

@ -22,6 +22,7 @@
<module>cloud-common-rabbit</module>
<module>cloud-common-saas</module>
<module>cloud-common-wechat</module>
<module>cloud-common-kafka</module>
</modules>
<artifactId>cloud-common</artifactId>

View File

@ -7,7 +7,7 @@ nacos:
addr: 159.75.188.178:8848
user-name: nacos
password: nacos
namespace: eight
namespace: xxy
# Spring
spring:

View File

@ -11,6 +11,10 @@
<artifactId>cloud-modules-carData</artifactId>
<description>
Data processing module
</description>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
@ -83,14 +87,17 @@
<version>2.9.3</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>3.0.0</version>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.apache.iotdb</groupId>
<artifactId>iotdb-session</artifactId>
<version>0.13.1</version>
</dependency>
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-rabbit</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -1,11 +1,9 @@
package com.muyu.carData;
import com.muyu.carData.listener.MyListener;
import com.muyu.common.security.annotation.EnableMyFeignClients;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
import javax.swing.*;
/**
* @Author
@ -19,7 +17,8 @@ import javax.swing.*;
public class CarDataApplication {
public static void main(String[] args) {
SpringApplication.run(CarDataApplication.class,args);
System.out.println("caused: Handler dispatch failed; nested exception is java.lang.NoSuchMethodError: java.nio.ByteBuffer.flip()Ljava/nio/ByteBuffer;;caused: java.nio.ByteBuffer.flip()Ljava/nio/ByteBuffer;");
SpringApplication application = new SpringApplication(CarDataApplication.class);
application.addListeners(new MyListener());
application.run(args);
}
}
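Reviewer note (not part of this changeset): with the +/- diff markers stripped it is hard to tell which startup path survives; if both the static SpringApplication.run call and the listener-registering variant remain, the application context is started twice. A minimal sketch of the main method, assuming the intent is only to register MyListener:

public static void main(String[] args) {
    SpringApplication application = new SpringApplication(CarDataApplication.class);
    application.addListeners(new MyListener());
    application.run(args);
}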

View File

@ -1,18 +0,0 @@
package com.muyu.carData.annotation;
import java.lang.annotation.*;
/**
* @Author
* @Package com.muyu.carData.annotation
* @Project cloud-server-8
* @name IoTTableName
* @Date 2024/9/27 19:29
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE})
public @interface IoTTableName {
String value() default "";
}

View File

@ -1,13 +0,0 @@
package com.muyu.carData.constract;
/**
* @Author
* @Package com.muyu.carData.constract
* @Project cloud-server-8
* @name IoTDBTableParam
* @Date 2024/9/27 20:02
*/
public class IoTDBTableParam {
public static final String SYSLOG_IOT_TABLE = "student";
}

View File

@ -15,7 +15,7 @@ import org.springframework.stereotype.Component;
import java.time.Duration;
import java.util.Collection;
/**
/**
* @Author
* @Package com.muyu.carData.consumer
* @Project cloud-server-8
@ -29,7 +29,7 @@ public class MyKafkaConsumer implements InitializingBean {
@Autowired
private KafkaConsumer kafkaConsumer;
private final String topicName = "test";
private final String topicName = "carJsons";
@Override
public void afterPropertiesSet() throws Exception {
@ -43,13 +43,14 @@ public class MyKafkaConsumer implements InitializingBean {
for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
//Get the consumed data from the consumerRecord
String value = consumerRecord.value();
log.info("Raw data consumed from Kafka: {}",value);
//Convert to a Java object
Student stu = JSONUtil.toBean(value, Student.class);
log.info("Consumed data converted to a Java object: {}",stu);
log.info("Raw data consumed from Kafka ===============>>{}",value);
}
}
});
thread.start();
log.info("启动线程结束监听topic:{}",topicName);
}
}

View File

@ -1,4 +1,4 @@
package com.muyu.carData.testcontroller;
package com.muyu.carData.controller;
import com.github.benmanes.caffeine.cache.Cache;
import com.muyu.carData.config.cacheconfig.CaffeineConfig;

View File

@ -1,4 +1,4 @@
package com.muyu.carData.testcontroller;
package com.muyu.carData.controller;
import com.muyu.carData.config.lotdbconfig.IotDBSessionConfig;
import lombok.extern.log4j.Log4j2;
@ -44,11 +44,13 @@ public class IotDBController {
@GetMapping("/insertData/{insertSize}/{count}")
public String insert(@PathVariable(name = "insertSize") int insertSize,@PathVariable(name = "count") int count) throws IoTDBConnectionException, StatementExecutionException {
Session session = iotDBSessionConfig.iotSession();
//schemaList: measurement names and their types
List<MeasurementSchema> schemaList = new ArrayList<>();
schemaList.add(new MeasurementSchema("id", TSDataType.INT32));
schemaList.add(new MeasurementSchema("name", TSDataType.TEXT));
schemaList.add(new MeasurementSchema("sex", TSDataType.TEXT));
//Tablet wraps the data to insert
Tablet tablet = new Tablet("root.yang.baling", schemaList);
//Use the current timestamp as the starting insert timestamp

View File

@ -1,7 +1,6 @@
package com.muyu.carData.testcontroller;
package com.muyu.carData.controller;
import com.alibaba.fastjson.JSONObject;
import com.muyu.carData.pojo.Student;
import lombok.extern.log4j.Log4j2;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
@ -25,21 +24,15 @@ public class KafkaProducerController {
@Autowired
private KafkaProducer kafkaProducer;
private final String topicName = "test";
private final String topicName = "carJsons";
@GetMapping("/produceTest")
public String produceTest() {
@GetMapping("/producer")
public String produceTest(JSONObject data) {
try {
Student stu = Student.builder().id(2)
.name("杨闪闪")
.sex("男")
.build();
String stuStr = JSONObject.toJSONString(stu);
log.info("Topic:{}", topicName);
log.info("Java对象{}",stu);
log.info("转换为JSON{}",stuStr);
log.info("转换为JSON{}",data);
//使用KafkaProducer发送消息
ProducerRecord<String, String> stringProducerRecord = new ProducerRecord<>(topicName, stuStr);
ProducerRecord<String, String> stringProducerRecord = new ProducerRecord(topicName, data);
kafkaProducer.send(stringProducerRecord);
}catch (Exception e){
log.error("Producer写入Topic异常,异常信息是:{}",e.getMessage());
@ -47,4 +40,6 @@ public class KafkaProducerController {
return "消息发送成功";
}
}

View File

@ -1,47 +0,0 @@
package com.muyu.carData.domain;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* @Author
* @Package com.muyu.carData.domain
* @Project cloud-server-8
* @name IoTDBRecord
* @Date 2024/9/27 19:25
*/
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class IoTDBRecord {
/**
*
*/
private String deviceId;
/**
*
*/
private long time = System.currentTimeMillis();
/**
*
*/
private List<String> measurementList;
/**
*
*/
private List<Object> valueList;
/**
*
*/
private List<String> typeList;
}

View File

@ -0,0 +1,22 @@
package com.muyu.carData.event;
import com.alibaba.fastjson.JSONObject;
import org.springframework.context.ApplicationEvent;
/**
* @Author
* @Package com.muyu.carData.event
* @Project cloud-server-8
* @name EsSaveEvent
* @Date 2024/9/29 21:15
*/
public class EsSaveEvent extends ApplicationEvent {
private JSONObject data;
public EsSaveEvent(JSONObject source) {
super(source);
this.data = source;
}
}

View File

@ -1,53 +0,0 @@
package com.muyu.carData.interfaces;
import com.muyu.carData.annotation.IoTTableName;
import com.muyu.carData.domain.IoTDBRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
/**
* @Author
* @Package com.muyu.carData.interfaces
* @Project cloud-server-8
* @name IoTDBRecordable
* @Date 2024/9/27 19:22
* Converts an object into an IoTDB record
*/
public interface IoTDBRecordable {
Logger logger = LoggerFactory.getLogger(IoTDBRecordable.class);
/**
*
* @return Record
*/
default IoTDBRecord toRecord() throws Exception {
IoTDBRecord ioTDBRecord = new IoTDBRecord();
Object getIoTDBTime = this.getClass().getMethod("getIoTDBTime").invoke(this);
if (null != getIoTDBTime){
ioTDBRecord.setTime((Long) getIoTDBTime);
}
Class aClass = this.getClass();
IoTTableName name = this.getClass().getAnnotation(IoTTableName.class);
ioTDBRecord.setDeviceId(name.value());
Field[] declaredFields = aClass.getDeclaredFields();
ArrayList<String> measurements = new ArrayList<>();
ArrayList<Object> records = new ArrayList<>();
ArrayList<String> types = new ArrayList<>();
for (Field declaredField : declaredFields) {
measurements.add(declaredField.getName());
String methodNamePro = declaredField.getName().substring(0, 1).toUpperCase() + declaredField.getName().substring(1);
Method methodName = this.getClass().getMethod("get" + methodNamePro);
records.add(methodName.invoke(this));
types.add(methodName.getReturnType().getName());
}
ioTDBRecord.setMeasurementList(measurements);
ioTDBRecord.setValueList(records);
ioTDBRecord.setTypeList(types);
return ioTDBRecord;
}
}

View File

@ -0,0 +1,22 @@
package com.muyu.carData.listener;
import com.muyu.carData.event.EsSaveEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
/**
* @Author
* @Package com.muyu.carData.listener
* @Project cloud-server-8
* @name CustomEventListener
* @Date 2024/9/29 23:49
*/
@Component
public class CustomEventListener {
@EventListener
public void handMyEvent(EsSaveEvent event){
//Handle the event details here
}
}

View File

@ -0,0 +1,20 @@
package com.muyu.carData.listener;
import com.muyu.carData.event.EsSaveEvent;
import lombok.extern.log4j.Log4j2;
import org.springframework.context.ApplicationListener;
/**
* @Author
* @Package com.muyu.carData.listener
* @Project cloud-server-8
* @name MyListener
* @Date 2024/9/29 21:18
*/
@Log4j2
public class MyListener implements ApplicationListener<EsSaveEvent> {
@Override
public void onApplicationEvent(EsSaveEvent event) {
log.info("监听到自定义事件........");
}
}

View File

@ -0,0 +1,29 @@
package com.muyu.carData.pulisher;
import com.alibaba.fastjson.JSONObject;
import com.muyu.carData.event.EsSaveEvent;
import lombok.AllArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.stereotype.Component;
/**
* @Author
* @Package com.muyu.carData.pulisher
* @Project cloud-server-8
* @name CustomEventPublisher
* @Date 2024/9/29 23:51
*/
@Log4j2
@Component
@AllArgsConstructor
public class CustomEventPublisher {
private ApplicationEventPublisher applicationEventPublisher;
public void publish(JSONObject data){
EsSaveEvent esSaveEvent = new EsSaveEvent(data);
applicationEventPublisher.publishEvent(esSaveEvent);
log.info("事件发布成功 - 消息是:{}",data);
}
}
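Reviewer note (not part of this changeset): MyListener implements ApplicationListener<EsSaveEvent> but is not a @Component, so it only receives events because CarDataApplication registers it via addListeners, while CustomEventListener relies on @EventListener plus component scanning. A minimal, hypothetical sketch of publishing through the class above, assuming the publisher is injected where a Kafka payload is handled:

import com.alibaba.fastjson.JSONObject;
import com.muyu.carData.pulisher.CustomEventPublisher;

public class EventUsageSketch {
    // Hypothetical caller: wraps a consumed Kafka payload in an EsSaveEvent and publishes it.
    public void onKafkaMessage(CustomEventPublisher publisher, String value) {
        JSONObject data = JSONObject.parseObject(value);
        publisher.publish(data);
    }
}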

View File

@ -0,0 +1,2 @@
Spring Boot Version: ${spring-boot.version}
Spring Application Name: ${spring.application.name}

View File

@ -0,0 +1,63 @@
# Tomcat
server:
port: 9702
# nacos server address (online)
nacos:
addr: 159.75.188.178:8848
user-name: nacos
password: nacos
namespace: eight
# SPRING_AMQP_DESERIALIZATION_TRUST_ALL=true spring.amqp.deserialization.trust.all
# Spring
spring:
iotdb:
username: root
password: root
ip: 60.204.221.52
port: 6667
maxSize: 100
fetchSize: 10000
main:
allow-bean-definition-overriding: true
application:
# Application name
name: cloud-carData
profiles:
# Environment profile
active: dev
cloud:
nacos:
discovery:
# Service registry address
server-addr: ${nacos.addr}
# nacos username
username: ${nacos.user-name}
# nacos password
password: ${nacos.password}
# Namespace
namespace: ${nacos.namespace}
config:
# Config server address
server-addr: ${nacos.addr}
# nacos username
username: ${nacos.user-name}
# nacos password
password: ${nacos.password}
# Namespace
namespace: ${nacos.namespace}
# Config file format
file-extension: yml
# Shared configuration
shared-configs:
# System shared configuration
- application-${spring.profiles.active}.${spring.cloud.nacos.config.file-extension}
# System environment Config shared configuration
- application-config-${spring.profiles.active}.${spring.cloud.nacos.config.file-extension}
# xxl-job configuration file
- application-xxl-config-${spring.profiles.active}.${spring.cloud.nacos.config.file-extension}
# rabbit configuration file
- application-rabbit-config-${spring.profiles.active}.${spring.cloud.nacos.config.file-extension}
logging:
level:
com.muyu.system.mapper: DEBUG

View File

@ -0,0 +1,74 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file location -->
<property name="log.path" value="logs/cloud-carData"/>
<!-- Log output pattern -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
</appender>
<!-- System log output -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>INFO</level>
<!-- On match: accept (log it) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (do not log) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>ERROR</level>
<!-- On match: accept (log it) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (do not log) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Log level for system modules -->
<logger name="com.muyu" level="info"/>
<!-- Log level for Spring -->
<logger name="org.springframework" level="warn"/>
<root level="info">
<appender-ref ref="console"/>
</root>
<!--System operation logs-->
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>

View File

@ -0,0 +1,81 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file location -->
<property name="log.path" value="logs/cloud-carData"/>
<!-- Log output pattern -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<property name="log.sky.pattern" value="%d{HH:mm:ss.SSS} %yellow([%tid]) [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.sky.pattern}</pattern>
</encoder>
</appender>
<!-- System log output -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>INFO</level>
<!-- On match: accept (log it) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (do not log) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>ERROR</level>
<!-- On match: accept (log it) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (do not log) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Send logs to the SkyWalking server over gRPC -->
<appender name="GRPC_LOG" class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.log.GRPCLogClientAppender">
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.TraceIdPatternLogbackLayout">
<Pattern>${log.sky.pattern}</Pattern>
</layout>
</encoder>
</appender>
<!-- Log level for system modules -->
<logger name="com.muyu" level="info"/>
<!-- Log level for Spring -->
<logger name="org.springframework" level="warn"/>
<root level="info">
<appender-ref ref="GRPC_LOG"/>
<appender-ref ref="console"/>
</root>
<!--System operation logs-->
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>

View File

@ -0,0 +1,81 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file location -->
<property name="log.path" value="logs/cloud-carData"/>
<!-- Log output pattern -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<property name="log.sky.pattern" value="%d{HH:mm:ss.SSS} %yellow([%tid]) [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.sky.pattern}</pattern>
</encoder>
</appender>
<!-- System log output -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>INFO</level>
<!-- On match: accept (log it) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (do not log) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>ERROR</level>
<!-- On match: accept (log it) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (do not log) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Send logs to the SkyWalking server over gRPC -->
<appender name="GRPC_LOG" class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.log.GRPCLogClientAppender">
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.TraceIdPatternLogbackLayout">
<Pattern>${log.sky.pattern}</Pattern>
</layout>
</encoder>
</appender>
<!-- Log level for system modules -->
<logger name="com.muyu" level="info"/>
<!-- Log level for Spring -->
<logger name="org.springframework" level="warn"/>
<root level="info">
<appender-ref ref="GRPC_LOG"/>
<appender-ref ref="console"/>
</root>
<!--System operation logs-->
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>

View File

@ -11,6 +11,7 @@ import java.util.function.Supplier;
/**
*
*
* @author 17353
*/
@Data
@ -18,7 +19,7 @@ import java.util.function.Supplier;
@NoArgsConstructor
@Builder
@Tag(name = "车辆报文模板实体类")
@TableName(value = "car_message",autoResultMap = true)
@TableName(value = "car_message", autoResultMap = true)
public class CarMessage {
//Message type module table
/**
@ -68,8 +69,11 @@ public class CarMessage {
*/
private Integer carMessageState;
private String carMessageName;
/**
*
*
* @param carMessage
* @param supplier
* @return
@ -92,6 +96,7 @@ public class CarMessage {
/**
*
*
* @param carMessage
* @return
*/
@ -127,7 +132,4 @@ public class CarMessage {
*/
}

View File

@ -0,0 +1,17 @@
package com.muyu.domain;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class KafKaData {
private String key;
private String type;
private String label;
private String value;
}

View File

@ -28,11 +28,22 @@
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
</dependency>
<!-- mqttv3 -->
<dependency>
<groupId>org.eclipse.paho</groupId>
<artifactId>org.eclipse.paho.client.mqttv3</artifactId>
<version>1.2.2</version>
</dependency>
<!-- SpringCloud Alibaba Nacos Config -->
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
</dependency>
<!-- apache.kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</dependency>
<!-- SpringCloud Alibaba Sentinel -->
<dependency>
<groupId>com.alibaba.cloud</groupId>
@ -83,6 +94,11 @@
<artifactId>pagehelper</artifactId>
<version>6.0.0</version>
</dependency>
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-kafka</artifactId>
<version>3.6.3</version>
</dependency>
</dependencies>
<build>
<finalName>cloud-electronic</finalName>

View File

@ -37,7 +37,9 @@ public class CarFenceClazzController {
@Operation(summary = "查询数据",description = "查询数据")
public Result<List<CarFenceClazz>> selectConnect(){
List<CarFenceClazz> connects = carFenceClazzService.list();
log.info("查询数据成功");
return Result.success(
connects, "操作成功"
);
}

View File

@ -43,6 +43,7 @@ public class CarFenceController {
@Validated @RequestBody CarFenceReq req
){
Page<CarFenceResp> connects = carFenceService.selectCarFence(req);
log.info("查询数据:"+ connects);
return Result.success(
connects, "操作成功"
);
@ -57,11 +58,13 @@ public class CarFenceController {
@Validated @RequestBody CarFence carFence
){
Boolean connects = carFenceService.addCarFence(carFence);
log.info("shd");
return connects?Result.success(
null, "操作成功"
):Result.success(
null, "操作失败"
);
}
/**
@ -73,6 +76,8 @@ public class CarFenceController {
@Validated @RequestBody CarGroupReq req
){
Boolean connects = carFenceService.addCarGroup(req);
log.info("添加数据:"+ connects);
return connects?Result.success(
null, "操作成功"
):Result.success(

View File

@ -9,6 +9,8 @@ import com.muyu.domain.req.CarInformationListReq;
import com.muyu.domain.req.CarInformationUpdReq;
import com.muyu.domain.resp.CarInformationResp;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@ -18,4 +20,12 @@ import java.util.List;
@Mapper
public interface CarInformationMapper extends MPJBaseMapper<CarInformation> {
/**
* VIN()
* @param carInformationVIN
* @return
*/
@Select("SELECT `car_information`.car_Information_Type FROM `car_information` WHERE `car_information`.car_information_VIN = #{carInformationVIN}")
Long selectcarMessageCartype(@Param("carInformationVIN") String carInformationVIN);
}

View File

@ -1,7 +1,9 @@
package com.muyu.server.service;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.extension.service.IService;
import com.muyu.domain.CarMessage;
import com.muyu.domain.resp.CarInformationResp;
import com.muyu.domain.resp.CarMessageResp;
import java.util.List;
@ -43,4 +45,12 @@ public interface CarMessageService extends IService<CarMessage> {
*/
List<CarMessageResp> selectJoinList(Long id);
//Message splitting
/**
*
*
*
*/
JSONObject inciseCarMessage(String testString);
}

View File

@ -1,5 +1,8 @@
package com.muyu.server.service.impl;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.github.yulichang.wrapper.MPJLambdaWrapper;
import com.muyu.common.core.utils.StringUtils;
@ -7,15 +10,21 @@ import com.muyu.domain.CarMessage;
import com.muyu.domain.CarMessageType;
import com.muyu.domain.CarType;
import com.muyu.domain.resp.CarMessageResp;
import com.muyu.server.mapper.CarInformationMapper;
import com.muyu.server.mapper.CarMessageMapper;
import com.muyu.server.service.CarMessageService;
import lombok.extern.log4j.Log4j2;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
/**
*
*/
@Log4j2
@Service
public class CarMessageServiceImpl
extends ServiceImpl<CarMessageMapper,CarMessage>
@ -23,6 +32,11 @@ public class CarMessageServiceImpl
@Resource
private CarMessageMapper carMessageMapper;
@Resource
private CarInformationMapper carInformationMapper;
/**
*
* @param
@ -91,5 +105,81 @@ public class CarMessageServiceImpl
.eq(StringUtils.isNotNull(id),CarMessage::getCarMessageCartype, id));
}
@Override
public JSONObject inciseCarMessage(String testString) {
return null;
}
//Message processing
// @Resource
// private RedisTemplate<String ,Objects > redisTemplate;
//
// /**
// * Message parsing
// * @param testString
// * @return
// */
// @Override
// public JSONObject inciseCarMessage(String testString) {
// //Split the data string on spaces
// String[] split = testString.split(" ");
// StringBuilder stringBuilder = new StringBuilder();
// for (String conversion : split) {
// //Parse each hex byte string into its decimal value
// int inciseindex = Integer.parseInt(conversion, 16);
// // Append the decimal value as a character
// stringBuilder.append((char) inciseindex);
// }
// //Cut out the vehicle VIN
// String substring = stringBuilder.substring(1, 18);
// log.info("Vehicle VIN: " + substring);
// //Use the given vehicleVin to look up the corresponding template car type carMessageCartype
// String selectcared = carInformationMapper.selectcarMessageCartype(substring);
// //List that receives the data
// List<CarMessage> messagesList ;
//
// try{
// String redisKey = "carMessageList" + selectcared;
//
// if (redisTemplate.hasKey(redisKey)){
// List<Objects> list = redisTemplate.opsForList().range(redisKey , 0, -1);
// messagesList = list.stream()
// .map(objects -> JSON.parseObject(objects.toString(), CarMessage.class))
// .toList();
// log.info("Redis缓存查询成功");
// }else {
// messagesList = carInformationMapper.selectcarMessageCartype(selectcared);
//
// messagesList.forEach(
// listReq -> redisTemplate.opsForList().rightPushAll(redisKey, JSON.toString(listReq) )
// );
// log.info("数据库查询成功");
// }
// }catch(Exception e){
// throw new RuntimeException("获取报文模板失败");
// }
// //Ensure the message template list is not empty
// if(messagesList.isEmpty()){
// throw new RuntimeException("Message template is empty");
// }
// //Store the data parsed with the message template
// JSONObject jsonObject = new JSONObject();
// for (CarMessage carMessage : messagesList) {
// //Start index
// Integer startIndex = carMessage.getCarMessageStartIndex();
// //End index
// Integer endIndex = carMessage.getCarMessageEndIndex();
// //Take the substring positions from the message template
// String value = stringBuilder.substring(startIndex, endIndex);
// //Store the value
// jsonObject.put(carMessage.getMessageTypeName(), value);
//
// }
// return jsonObject;
//
// }
}
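Reviewer note (not part of this changeset): the commented-out block above sketches a cache-aside lookup for message templates — read the list from Redis when the key exists, otherwise load it from the database and push each entry into the Redis list. A compact, hypothetical version of that pattern; the RedisTemplate<String, String> type, the key prefix and the loader Supplier are assumptions, not the author's code:

import com.alibaba.fastjson.JSON;
import com.muyu.domain.CarMessage;
import org.springframework.data.redis.core.RedisTemplate;
import java.util.List;
import java.util.function.Supplier;

public class TemplateCacheSketch {
    // Cache-aside: return cached CarMessage templates for a car type, falling back to the loader.
    static List<CarMessage> load(RedisTemplate<String, String> redis, Long carType,
                                 Supplier<List<CarMessage>> dbLoader) {
        String key = "carMessageList" + carType;
        if (Boolean.TRUE.equals(redis.hasKey(key))) {
            List<String> cached = redis.opsForList().range(key, 0, -1);
            return cached.stream().map(s -> JSON.parseObject(s, CarMessage.class)).toList();
        }
        List<CarMessage> fromDb = dbLoader.get();
        fromDb.forEach(m -> redis.opsForList().rightPush(key, JSON.toJSONString(m)));
        return fromDb;
    }
}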

View File

@ -26,7 +26,7 @@ public class MessageDao {
}
public void sendMessage(Message message) throws Exception{
String sql="INSERT INTO `eight`.`car_fault_message` (`id`, `sender`, `receiver`, `content`, `status`, `create_time`, `user_id`) " +
String sql="INSERT INTO `xxy`.`car_fault_message` (`id`, `sender`, `receiver`, `content`, `status`, `create_time`, `user_id`) " +
"VALUES (NULL, NULL, NULL, NULL, NULL, NULL, NULL)";
try(PreparedStatement pstmt=connection.prepareStatement(sql)){
pstmt.setString(1, message.getContent());

View File

@ -7,7 +7,7 @@ nacos:
addr: 159.75.188.178:8848
user-name: nacos
password: nacos
namespace: eight
namespace: xxy
# Spring
spring:

View File

@ -7,7 +7,7 @@ nacos:
addr: 159.75.188.178:8848
user-name: nacos
password: nacos
namespace: eight
namespace: xxy
# Spring
spring:

View File

@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.muyu</groupId>
<artifactId>cloud-modules</artifactId>
<version>3.6.3</version>
</parent>
<artifactId>cloud-modules-protocolparsing</artifactId>
<dependencies>
<!-- Remote Feign client -->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-openfeign</artifactId>
</dependency>
<!-- mqttv3 -->
<dependency>
<groupId>org.eclipse.paho</groupId>
<artifactId>org.eclipse.paho.client.mqttv3</artifactId>
<version>1.2.2</version>
</dependency>
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
</dependency>
<!-- SpringCloud Alibaba Nacos Config -->
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
</dependency>
<!-- apache.kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</dependency>
<!-- SpringCloud Alibaba Sentinel -->
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-sentinel</artifactId>
</dependency>
<!-- SpringBoot Actuator -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<!-- Mysql Connector -->
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
</dependency>
<!-- MuYu Common DataSource -->
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-datasource</artifactId>
</dependency>
<!-- MuYu Common DataScope -->
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-datascope</artifactId>
</dependency>
<!-- MuYu Common Log -->
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-log</artifactId>
</dependency>
<!-- API doc module -->
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-api-doc</artifactId>
</dependency>
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-modules-enterprise-common</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>com.github.yulichang</groupId>
<artifactId>mybatis-plus-join-boot-starter</artifactId>
<version>1.4.13</version>
</dependency>
<dependency>
<groupId>com.github.pagehelper</groupId>
<artifactId>pagehelper</artifactId>
<version>6.0.0</version>
</dependency>
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-kafka</artifactId>
<version>3.6.3</version>
</dependency>
</dependencies>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
</project>

View File

@ -0,0 +1,15 @@
package com.muyu;
import com.muyu.common.security.annotation.EnableMyFeignClients;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@EnableMyFeignClients
@SpringBootApplication
public class ProtocolApplication {
public static void main(String[] args) {
SpringApplication.run(ProtocolApplication.class, args);
}
}

View File

@ -0,0 +1,38 @@
package com.muyu.controller;
import com.muyu.common.core.domain.Result;
import com.muyu.domain.CarMessage;
import com.muyu.service.CarMessageService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.extern.log4j.Log4j2;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import java.util.List;
/**
*
*/
@Log4j2
@RestController
@RequestMapping("/carMessage")
@Tag(name = "信息报文模块" )
public class CarMessageController {
/**
*
*/
}

View File

@ -0,0 +1,135 @@
package com.muyu.domain;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.function.Supplier;
/**
*
*
* @author 17353
*/
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
@Tag(name = "车辆报文模板实体类")
@TableName(value = "car_message", autoResultMap = true)
public class CarMessage {
//Message type module table
/**
*
*/
private Long messageTypeId;
/**
*
*/
private String messageTypeCode;
/**
*
*/
private String messageTypeName;
/**
*
*/
private String messageTypeBelongs;
//报文拆分位置主表
/**
*
*/
private Long carMessageId;
/**
*
*/
private Integer carMessageCartype;
/**
*
*/
private Integer carMessageType;
/**
*
*/
private Integer carMessageStartIndex;
/**
*
*/
private Integer carMessageEndIndex;
/**
* ( )
*/
private String messageTypeClass;
/**
* (0 1)
*/
private Integer carMessageState;
private String carMessageName;
/**
*
*
* @param carMessage
* @param supplier
* @return
*/
public static CarMessage carMessageUpdBuilder(CarMessage carMessage, Supplier<Long> supplier) {
return CarMessage.builder()
.messageTypeId(supplier.get())
.messageTypeCode(carMessage.messageTypeCode)
.messageTypeName(carMessage.messageTypeName)
.messageTypeBelongs(carMessage.messageTypeBelongs)
.carMessageId(carMessage.carMessageId)
.carMessageCartype(carMessage.carMessageCartype)
.carMessageType(carMessage.carMessageType)
.carMessageStartIndex(carMessage.carMessageStartIndex)
.carMessageEndIndex(carMessage.carMessageEndIndex)
.messageTypeClass(carMessage.messageTypeClass)
.carMessageState(carMessage.carMessageState)
.build();
}
/**
*
*
* @param carMessage
* @return
*/
public static CarMessage carMessageAddBuilder(CarMessage carMessage) {
return CarMessage.builder()
.messageTypeCode(carMessage.messageTypeCode)
.messageTypeName(carMessage.messageTypeName)
.messageTypeBelongs(carMessage.messageTypeBelongs)
.carMessageId(carMessage.carMessageId)
.carMessageCartype(carMessage.carMessageCartype)
.carMessageType(carMessage.carMessageType)
.carMessageStartIndex(carMessage.carMessageStartIndex)
.carMessageEndIndex(carMessage.carMessageEndIndex)
.messageTypeClass(carMessage.messageTypeClass)
.carMessageState(carMessage.carMessageState)
.build();
}
/*
private Long messageTypeId ;
private String messageTypeCode ;
private String messageTypeName ;
private String messageTypeBelongs ;
private String messageTypeClass ;
private Long carMessageId ;
private Integer carMessageCartype ;
private Integer carMessageType ;
private Integer carMessageStartIndex ;
private Integer carMessageEndIndex ;
private Integer carMessageState ;
*/
}

View File

@ -0,0 +1,43 @@
package com.muyu.domain;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
*
* @Author
* @Package com.muyu.warn.domain.car
* @Project cloud-server-8
* @name CarMessage
* @Date 2024/9/22 3:07
*/
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Tag(name = "车辆报文所属类型")
@TableName(value = "car_message_type",autoResultMap = true)
public class CarMessageType {
/**
*
*/
private Integer messageTypeId ;
/**
*
*/
private String messageTypeCode ;
/**
*
*/
private String messageTypeName ;
/**
*
*/
private String messageTypeBelongs ;
}

View File

@ -0,0 +1,17 @@
package com.muyu.domain;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class KafKaData {
private String key;
private String type;
private String label;
private String value;
}

View File

@ -0,0 +1,57 @@
package com.muyu.domain.resp;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* ()
* @Author
* @Package com.muyu.warn.domain.car
* @Project cloud-server-8
* @name resp
* @Date 2024/9/22 7:12
*/
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Tag(name = "车辆报文信息")
public class CarMessageResp {
/**
*
*/
private Integer carMessageId ;
/**
*
*/
private Integer carMessageCartype ;
/**
*
*/
private Integer carMessageType ;
/**
*
*/
private String messageTypeName ;
/**
*
*/
private Integer carMessageStartIndex ;
/**
*
*/
private Integer carMessageEndIndex ;
/**
* ( )
*/
private String messageTypeClass ;
/**
* (0 1)
*/
private Integer carMessageState ;
}

View File

@ -0,0 +1,19 @@
package com.muyu.feign;
import com.alibaba.fastjson2.JSONObject;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.GetMapping;
@FeignClient(name = "cloud-modules-carData")
public interface KafkaClient {
/**
* "/produceTest"GET
*
* @return String
* @return
*/
@GetMapping("/produce/producer")
public abstract String producerKafka(JSONObject data);
}
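Reviewer note (not part of this changeset): producerKafka declares a JSONObject parameter on a @GetMapping with no parameter annotation; in Spring Cloud OpenFeign an unannotated object is encoded as the request body, and a GET request with a body is often dropped or rejected. If that is not intended, a hypothetical alternative is to send the payload as a POST body (the target controller would need a matching mapping):

import com.alibaba.fastjson2.JSONObject;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;

@FeignClient(name = "cloud-modules-carData", contextId = "kafkaClientSketch")
public interface KafkaClientSketch {
    // Hypothetical variant: carries the JSON payload in the request body of a POST.
    @PostMapping("/produce/producer")
    String producerKafka(@RequestBody JSONObject data);
}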

View File

@ -0,0 +1,24 @@
package com.muyu.mapper;
import com.github.yulichang.base.MPJBaseMapper;
import com.muyu.domain.CarInformation;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
/**
*
*/
@Mapper
public interface CarInformationMapper extends MPJBaseMapper<CarInformation> {
/**
* VIN()
* @param carInformationVIN
* @return
*/
@Select("SELECT `car_information`.car_Information_Type FROM `car_information` WHERE `car_information`.car_information_VIN = #{carInformationVIN}")
Long selectcarMessageCartype(@Param("carInformationVIN") String carInformationVIN);
}

View File

@ -0,0 +1,17 @@
package com.muyu.mapper;
import com.github.yulichang.base.MPJBaseMapper;
import com.muyu.domain.CarMessage;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
/**
*
*/
@Mapper
public interface CarMessageMapper extends MPJBaseMapper<CarMessage> {
}

View File

@ -0,0 +1,12 @@
package com.muyu.mapper;
import com.muyu.domain.CarMessage;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@Mapper
public interface CarMessagePlusMapper {
List<CarMessage> selectCarMessageList(@Param("carMessageCartype") Integer carMessageCartype);
}

View File

@ -0,0 +1,8 @@
package com.muyu.mapper;
import com.github.yulichang.base.MPJBaseMapper;
import com.muyu.domain.CarMessageType;
public interface CarMessageTypeMapper extends MPJBaseMapper<CarMessageType> {
}

View File

@ -0,0 +1,104 @@
package com.muyu.server.mqtt;
import com.alibaba.fastjson.JSONObject;
import com.muyu.domain.CarMessage;
import com.muyu.server.service.CarMessageService;
import jakarta.annotation.PostConstruct;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.eclipse.paho.client.mqttv3.*;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
@Component
public class Demo {
@Resource
private CarMessageService service;
@Resource
private KafkaProducer<String, String> kafkaProducer;
@PostConstruct
public void test() {
String topic = "vehicle";
String content = "Message from MqttPublishSample";
int qos = 2;
String broker = "tcp://106.15.136.7:1883";
String clientId = "JavaSample";
try {
// Third parameter omitted, so the default persistence strategy is used
MqttClient sampleClient = new MqttClient(broker, clientId);
MqttConnectOptions connOpts = new MqttConnectOptions();
connOpts.setCleanSession(true);
System.out.println("Connecting to broker: "+broker);
sampleClient.connect(connOpts);
sampleClient.subscribe(topic,0);
sampleClient.setCallback(new MqttCallback() {
// Connection lost
@Override
public void connectionLost(Throwable throwable) {
}
// Message arrived
@Override
public void messageArrived(String s, MqttMessage mqttMessage) throws Exception {
List<CarMessage> list= service.selectCarMessageList(1,2);
String str = new String( mqttMessage.getPayload() );
System.out.println(str);
String[] test = str.split(" ");
String[] results = new String[list.size()];
List<CompletableFuture<String>> futures = new ArrayList<>();
for (CarMessage carmsg : list) {
futures.add(CompletableFuture.supplyAsync(() -> {
int startIndex = Integer.parseInt(String.valueOf(carmsg.getCarMessageStartIndex())) - 1;
int endIndex = Integer.parseInt(String.valueOf(carmsg.getCarMessageEndIndex()));
StringBuilder hexBuilder = new StringBuilder();
for (int j = startIndex; j < endIndex; j++) {
hexBuilder.append(test[j]);
}
// Build the hex string
String hex = hexBuilder.toString();
// Convert to a char array
char[] result = new char[hex.length() / 2];
for (int x = 0; x < hex.length(); x += 2) {
// High nibble as a decimal value
int high = Character.digit(hex.charAt(x), 16);
// Low nibble as a decimal value
int low = Character.digit(hex.charAt(x + 1), 16);
// Combine the two nibbles into one character
result[x / 2] = (char) ((high << 4) + low);
}
return new String(result);
}));
}
for (int i = 0; i < futures.size(); i++) {
results[i] = futures.get(i).get();
}
String jsonString = JSONObject.toJSONString( results );
ProducerRecord<String, String> producerRecord = new ProducerRecord<>( "carJsons", jsonString);
kafkaProducer.send(producerRecord);
}
// Delivery complete
@Override
public void deliveryComplete(IMqttDeliveryToken iMqttDeliveryToken) {
}
});
} catch(MqttException me) {
System.out.println("reason "+me.getReasonCode());
System.out.println("msg "+me.getMessage());
System.out.println("loc "+me.getLocalizedMessage());
System.out.println("cause "+me.getCause());
System.out.println("excep "+me);
me.printStackTrace();
}
}
}

View File

@ -0,0 +1,118 @@
package com.muyu.mqtt;
import com.alibaba.fastjson2.JSONObject;
import com.muyu.common.kafka.constants.KafkaConstants;
import com.muyu.domain.CarMessage;
import com.muyu.domain.KafKaData;
import com.muyu.service.CarMessageService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.eclipse.paho.client.mqttv3.*;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.List;
/**
* MQTT listener: subscribes to the vehicle topic, parses the payload, and produces the result to Kafka
*
* @ClassName MqttTest
* @Description
* @Date 2024/9/28 23:49
*/
@Slf4j
@Component
public class MqttTest {
private static final Integer ID = 1;
private static final Integer CODE = 1;
@Resource
private KafkaProducer<String, String> kafkaProducer;
@Resource
private CarMessageService sysCarMessageService;
@PostConstruct
public void Test() {
String topic = "vehicle";
String content = "Message from MqttPublishSample";
int qos = 2;
String broker = "tcp://60.204.221.52:1883";
String clientId = "JavaSample";
try {
// Third parameter omitted, so the default persistence strategy is used
MqttClient sampleClient = new MqttClient(broker, clientId);
MqttConnectOptions connOpts = new MqttConnectOptions();
connOpts.setCleanSession(true);
System.out.println("Connecting to broker: " + broker);
sampleClient.connect(connOpts);
sampleClient.subscribe(topic, 0);
sampleClient.setCallback(new MqttCallback() {
// Connection lost
@Override
public void connectionLost(Throwable throwable) {
}
// Message arrived
@Override
public void messageArrived(String s, MqttMessage mqttMessage) throws Exception {
List<CarMessage> list = sysCarMessageService.selectCarMessage(1);
String string = new String(mqttMessage.getPayload());
log.info(new String(mqttMessage.getPayload()));
List<KafKaData> kafKaDataList = new ArrayList<>();
String[] test = string.split(" ");
// String[] results = new String[list.size()];
for (CarMessage carMessage : list) {
int startIndex = Integer.parseInt(String.valueOf(carMessage.getCarMessageStartIndex())) - 1;
int endIndex = Integer.parseInt(String.valueOf(carMessage.getCarMessageEndIndex()));
StringBuilder hexBuilder = new StringBuilder();
for (int i = startIndex; i < endIndex; i++) {
hexBuilder.append(test[i]);
}
String hex = hexBuilder.toString();
char[] result = new char[hex.length() / 2];
for (int x = 0; x < hex.length(); x += 2) {
int high = Character.digit(hex.charAt(x), 16);
int low = Character.digit(hex.charAt(x + 1), 16);
result[x / 2] = (char) ((high << 4) + low);
}
String value = new String(result);
kafKaDataList.add(KafKaData.builder()
.key(carMessage.getMessageTypeCode())
.label(carMessage.getMessageTypeCode())
.value(value)
.type("String")
.build());
}
String jsonString = JSONObject.toJSONString(kafKaDataList);
ProducerRecord<String, String> producerRecord = new ProducerRecord<>(KafkaConstants.KafkaTopic, jsonString);
kafkaProducer.send(producerRecord);
log.info("kafka投产{}", jsonString);
// HashMap<String, String> stringStringHashMap = new HashMap<>();
// kafKaDataList.forEach(data -> stringStringHashMap.put(data.getKey(), data.getValue()));
// jsonString = JSONObject.toJSONString(stringStringHashMap);
// System.out.println(jsonString);
}
// Delivery complete
@Override
public void deliveryComplete(IMqttDeliveryToken iMqttDeliveryToken) {
}
});
} catch (MqttException me) {
System.out.println("reason " + me.getReasonCode());
System.out.println("msg " + me.getMessage());
System.out.println("loc " + me.getLocalizedMessage());
System.out.println("cause " + me.getCause());
System.out.println("excep " + me);
me.printStackTrace();
}
}
}
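The messageArrived callback above rebuilds characters from space-separated hex bytes: each pair of hex digits becomes one ASCII character, and the decoded slices are collected into KafKaData entries before being produced to Kafka. A standalone sketch of the same conversion with a worked example (not part of this changeset; assumes the payload bytes are printable ASCII):

public class HexDecodeSketch {
    // Decodes a run of two-digit hex values into the ASCII string they encode.
    static String decode(String[] hexBytes, int startIndex, int endIndex) {
        StringBuilder hex = new StringBuilder();
        for (int i = startIndex; i < endIndex; i++) {
            hex.append(hexBytes[i]);
        }
        char[] chars = new char[hex.length() / 2];
        for (int x = 0; x < hex.length(); x += 2) {
            int high = Character.digit(hex.charAt(x), 16);
            int low = Character.digit(hex.charAt(x + 1), 16);
            chars[x / 2] = (char) ((high << 4) + low);
        }
        return new String(chars);
    }

    public static void main(String[] args) {
        // "56 49 4E" decodes to "VIN": 0x56 = 'V', 0x49 = 'I', 0x4E = 'N'.
        System.out.println(decode(new String[]{"56", "49", "4E"}, 0, 3));
    }
}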

View File

@ -0,0 +1,166 @@
package com.muyu.mqtt;//package com.muyu.server.mqtt;
//
//import com.muyu.analysis.parsing.remote.RemoteClientService;
//import com.muyu.common.core.constant.KafkaConstants;
//import com.muyu.common.core.constant.RedisConstants;
//import com.muyu.common.core.domain.Result;
//import com.muyu.enterprise.domain.resp.car.MessageValueListResp;
//import com.muyu.server.feign.KafkaClient;
//import jakarta.annotation.PostConstruct;
//import jakarta.annotation.Resource;
//import cn.hutool.json.JSONObject;
//import com.alibaba.fastjson.JSON;
//import lombok.extern.log4j.Log4j2;
//import org.apache.kafka.clients.producer.KafkaProducer;
//import org.apache.kafka.clients.producer.ProducerRecord;
//import org.eclipse.paho.client.mqttv3.*;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.data.redis.core.RedisTemplate;
//import org.springframework.stereotype.Component;
//
//import java.util.List;
//
///**
// * Protocol parsing: processes the data and sends it on to the queue
// * @Author 李庆帅
// * @Package com.muyu.analysis.parsing.MQTT
// * @Project cloud-server
// * @name ParsingMQTT
// * @Date 2024/9/29 16:08
// */
//@Log4j2
//@Component
//public class ParsingMQTT {
//
// @Resource
// private RedisTemplate<String, Object> redisTemplate;
//
// @Autowired
// private KafkaClient remoteServiceClient;
//
// @Resource
// private KafkaProducer<String, String> kafkaProducer;
//
// /**
// * Protocol parsing
// */
// @PostConstruct
// public void mqttClient() {
// String topic = "vehicle";
// String broker = "tcp://106.15.136.7:1883";
// String clientId = "JavaSample";
//
// try {
// // Third parameter omitted, so the default persistence strategy is used
// MqttClient sampleClient = new MqttClient(broker, clientId);
// MqttConnectOptions connOpts = new MqttConnectOptions();
// connOpts.setCleanSession(true);
// System.out.println("Connecting to broker: " + broker);
// sampleClient.connect(connOpts);
// sampleClient.subscribe(topic, 0);
// sampleClient.setCallback(new MqttCallback() {
// // Connection lost
// @Override
// public void connectionLost(Throwable throwable) {
//
// }
//
// // Message arrived
// @Override
// public void messageArrived(String s, MqttMessage mqttMessage) throws Exception {
// System.out.println(new String(mqttMessage.getPayload()));
// JSONObject entries = this.protocolParsing(new String(mqttMessage.getPayload()));
//
// ProducerRecord<String, String> producerRecord = new ProducerRecord<>(KafkaConstants.MESSAGE_PARSING,
// entries.toString() );
// kafkaProducer.send(producerRecord);
// log.info("解析之后的数据"+entries);
//
// }
//
// /**
// * Protocol parsing
// * @param messageStr
// * @return
// */
// public JSONObject protocolParsing(String messageStr) {
// //Split the data on spaces
// String[] hexArray = messageStr.split(" ");
// StringBuilder result = new StringBuilder();
// //Convert each hex value to its character
// for (String hex : hexArray) {
// int decimal = Integer.parseInt(hex, 16);
// result.append((char) decimal);
// }
// //Extract the vehicle VIN
// String vehicleVin = result.substring(1, 18);
// log.info("Vehicle VIN: " + vehicleVin);
// //Look up the message template ID by vehicle VIN
// Result<Long> byVehicleVin = remoteServiceClient.findByVehicleVin(vehicleVin);
// Long templateId = byVehicleVin.getData();
// List<MessageValueListResp> templateList;
// //Fetch the message template data from the Redis cache
// try {
// String redisKey = RedisConstants.MESSAGE_TEMPLATE + templateId;
// if (redisTemplate.hasKey(redisKey)) {
// List<Object> list = redisTemplate.opsForList().range(redisKey, 0, -1);
// templateList = list.stream()
// .map(obj -> JSON.parseObject(obj.toString(), MessageValueListResp.class))
// .toList();
// log.info("Redis缓存查询成功");
// } else {
// Result<List<MessageValueListResp>> byTemplateId = remoteServiceClient.producerKafka(templateId);
// templateList = byTemplateId.getData();
// templateList.forEach(
// listResp ->
// redisTemplate.opsForList().rightPush(
// redisKey, JSON.toJSONString(listResp)
// )
// );
// log.info("数据库查询成功:"+byTemplateId);
// }
// } catch (Exception e) {
// log.info("获取报文模板失败");
// throw new RuntimeException("获取报文模板失败");
// }
// //判断报文模板列表不为空
// if (templateList.isEmpty()) {
// log.info("报文模版为空");
// throw new RuntimeException("报文模版为空");
// }
// //存储报文模版解析后的数据
// JSONObject jsonObject = new JSONObject();
// for (MessageValueListResp messageValue : templateList) {
// //起始位下标
// Integer startIndex = messageValue.getMessageStartIndex() - 1;
// //结束位下标
// Integer endIndex = messageValue.getMessageEndIndex();
// //根据报文模版截取数据
// String value = result.substring(startIndex, endIndex);
// //存入数据
// jsonObject.put(messageValue.getMessageLabel(), value);
// }
// return jsonObject;
// }
//
// // 接收信息
// @Override
// public void deliveryComplete(IMqttDeliveryToken iMqttDeliveryToken) {
//
// }
// });
// } catch (MqttException me) {
// log.info("reason " + me.getReasonCode());
// log.info("msg " + me.getMessage());
// log.info("loc " + me.getLocalizedMessage());
// log.info("cause " + me.getCause());
// log.info("excep " + me);
// me.printStackTrace();
// }
// }
//
//
//
//
//
//}
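Note that the whole class above is committed fully commented out, so this module does not actually consume the MQTT feed yet. For reference, a minimal sketch of the same bridge, assuming the Eclipse Paho MQTT client and the plain kafka-clients producer are on the classpath; the broker addresses, topic names and class name below are placeholders, not the project's real configuration:

// Sketch only: subscribes to the MQTT topic, decodes the space-separated hex payload
// and forwards the decoded string to Kafka. All endpoints below are placeholders.
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;

public class VehicleMessageBridge {

    public static void main(String[] args) throws MqttException {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");   // placeholder Kafka broker
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        KafkaProducer<String, String> producer = new KafkaProducer<>(props);

        MqttClient client = new MqttClient("tcp://localhost:1883", "bridge-sample"); // placeholder MQTT broker
        MqttConnectOptions options = new MqttConnectOptions();
        options.setCleanSession(true);
        client.connect(options);

        // Decode and forward every message arriving on the "vehicle" topic;
        // the Paho client threads keep the JVM alive after main returns.
        client.subscribe("vehicle", 0, (topic, message) -> {
            String decoded = decodeHex(new String(message.getPayload()));
            producer.send(new ProducerRecord<>("message_parsing", decoded));  // placeholder Kafka topic
        });
    }

    // Turns "56 49 4E ..." style payloads back into the original character string
    static String decodeHex(String payload) {
        StringBuilder out = new StringBuilder();
        for (String hex : payload.trim().split(" ")) {
            out.append((char) Integer.parseInt(hex, 16));
        }
        return out.toString();
    }
}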

View File

@ -0,0 +1,35 @@
package com.muyu.service;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.extension.service.IService;
import com.muyu.domain.CarMessage;
import com.muyu.domain.resp.CarMessageResp;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * Car message template service.
 */
public interface CarMessageService extends IService<CarMessage> {
// Message slicing
/**
 * Slices a raw message string into labeled fields according to the message template.
 *
 * @param testString raw message as space-separated hex bytes
 * @return parsed fields keyed by field label
 */
JSONObject inciseCarMessage(String testString);
/** Queries message template rows by car model code. */
List<CarMessage> selectCarMessageList(int modelCode);
/** Queries message template rows by car type via CarMessagePlusMapper. */
List<CarMessage> selectCarMessage(@Param("carMessageCartype") Integer carMessageCartype);
/** Queries all message template rows for the given car type. */
List<CarMessage> selectCarMessageListAll(int i);
}
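A minimal caller sketch for this interface, assuming it is injected into some Spring bean; the controller, its path and the sample payload are invented for illustration and are not part of this repository:

// Hypothetical caller; only illustrates the service contract, not a real endpoint of this project.
import com.alibaba.fastjson.JSONObject;
import com.muyu.service.CarMessageService;
import javax.annotation.Resource;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class CarMessageDemoController {

    @Resource
    private CarMessageService carMessageService;

    /** Decodes a space-separated hex frame into labeled fields. */
    @GetMapping("/demo/incise")
    public JSONObject incise(@RequestParam String frame) {
        // e.g. frame = "56 49 4E 31 32 33 34" (placeholder payload)
        return carMessageService.inciseCarMessage(frame);
    }
}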

View File

@ -0,0 +1,134 @@
package com.muyu.service.impl;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.github.yulichang.wrapper.MPJLambdaWrapper;
import com.muyu.common.core.utils.StringUtils;
import com.muyu.domain.CarMessage;
import com.muyu.domain.resp.CarMessageResp;
import com.muyu.mapper.CarInformationMapper;
import com.muyu.mapper.CarMessageMapper;
import com.muyu.mapper.CarMessagePlusMapper;
import com.muyu.service.CarMessageService;
import lombok.extern.log4j.Log4j2;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.List;
/**
 * Car message template service implementation.
 */
@Log4j2
@Service
public class CarMessageServiceImpl
extends ServiceImpl<CarMessageMapper, CarMessage>
implements CarMessageService {
@Resource
private CarMessageMapper carMessageMapper;
@Resource
private CarMessagePlusMapper carMessagePlusMapper;
@Resource
private CarInformationMapper carInformationMapper;
@Override
public List<CarMessage> selectCarMessageList(int modelCode) {
LambdaQueryWrapper<CarMessage> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(CarMessage::getCarMessageCartype, modelCode);
return this.list(queryWrapper);
}
@Override
public List<CarMessage> selectCarMessage(Integer carMessageCartype) {
return carMessagePlusMapper.selectCarMessageList(carMessageCartype);
}
@Override
public List<CarMessage> selectCarMessageListAll(int i) {
LambdaQueryWrapper<CarMessage> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(CarMessage::getCarMessageCartype, i);
return this.list(queryWrapper);
}
@Override
public JSONObject inciseCarMessage(String testString) {
// Parsing is currently disabled; the original implementation is kept below for reference.
return null;
}
// Message parsing (disabled)
// @Resource
// private RedisTemplate<String ,Objects > redisTemplate;
//
// /**
// * Message parsing.
// * @param testString raw message as space-separated hex bytes
// * @return parsed fields keyed by message type name
// */
// @Override
// public JSONObject inciseCarMessage(String testString) {
// // Split the data string on spaces
// String[] split = testString.split(" ");
// StringBuilder stringBuilder = new StringBuilder();
// for (String conversion : split) {
// // Convert the hexadecimal string to its decimal value
// int inciseindex = Integer.parseInt(conversion, 16);
// // Convert the decimal value to the corresponding character
// stringBuilder.append((char) inciseindex);
// }
// // Extract the vehicle VIN
// String substring = stringBuilder.substring(1, 18);
// log.info("Vehicle VIN: " + substring);
// // Resolve the template car type (carMessageCartype) for the given vehicleVin
// String selectcared = carInformationMapper.selectcarMessageCartype(substring);
// // List that receives the template rows
// List<CarMessage> messagesList ;
//
// try{
// String redisKey = "carMessageList" + selectcared;
//
// if (redisTemplate.hasKey(redisKey)){
// List<Objects> list = redisTemplate.opsForList().range(redisKey , 0, -1);
// messagesList = list.stream()
// .map(objects -> JSON.parseObject(objects.toString(), CarMessage.class))
// .toList();
// log.info("Redis cache hit");
// }else {
// messagesList = carInformationMapper.selectcarMessageCartype(selectcared);
//
// messagesList.forEach(
// listReq -> redisTemplate.opsForList().rightPushAll(redisKey, JSON.toJSONString(listReq))
// );
// log.info("Database query succeeded");
// }
// }catch(Exception e){
// throw new RuntimeException("Failed to load the message template");
// }
// // The template list must not be empty
// if(messagesList.isEmpty()){
// throw new RuntimeException("Message template is empty");
// }
// // Collect the fields parsed according to the template
// JSONObject jsonObject = new JSONObject();
// for (CarMessage carMessage : messagesList) {
// // Start index
// Integer startIndex = carMessage.getCarMessageStartIndex();
// // End index
// Integer endIndex = carMessage.getCarMessageEndIndex();
// // Slice the value at the position given by the template
// String value = stringBuilder.substring(startIndex, endIndex);
// // Store the field
// jsonObject.put(carMessage.getMessageTypeName(), value);
//
// }
// return jsonObject;
//
// }
}
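Because the active inciseCarMessage above simply returns null, here is a self-contained sketch of the slicing step that the commented-out version performs, with the Redis and mapper lookups replaced by an in-memory template list; the field labels and index values are invented for illustration:

// Standalone sketch: decodes the hex frame and slices it by (label, start, end) template rows.
// The template rows below are invented; the real ones come from car_message via Redis/MyBatis.
import com.alibaba.fastjson.JSONObject;
import java.util.List;

public class InciseSketch {

    /** One row of the message template: label plus 1-based start and end positions. */
    record FieldSpec(String label, int start, int end) { }

    public static JSONObject incise(String frame, List<FieldSpec> template) {
        // Hex bytes -> original character string
        StringBuilder decoded = new StringBuilder();
        for (String hex : frame.trim().split(" ")) {
            decoded.append((char) Integer.parseInt(hex, 16));
        }
        // Slice each field out of the decoded string
        JSONObject fields = new JSONObject();
        for (FieldSpec spec : template) {
            fields.put(spec.label(), decoded.substring(spec.start() - 1, spec.end()));
        }
        return fields;
    }

    public static void main(String[] args) {
        // "VIN1234" encoded as space-separated hex, purely for demonstration
        String frame = "56 49 4E 31 32 33 34";
        List<FieldSpec> template = List.of(new FieldSpec("vin", 1, 7));
        System.out.println(incise(frame, template).toJSONString());  // {"vin":"VIN1234"}
    }
}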

View File

@ -0,0 +1,2 @@
Spring Boot Version: ${spring-boot.version}
Spring Application Name: ${spring.application.name}

View File

@ -0,0 +1,74 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file location -->
<property name="log.path" value="logs/cloud-electronic"/>
<!-- Log output pattern -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
</appender>
<!-- System log output -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>INFO</level>
<!-- On match: accept (log the event) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (drop the event) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>ERROR</level>
<!-- On match: accept (log the event) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (drop the event) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Log level for system modules -->
<logger name="com.muyu" level="info"/>
<!-- Log level for Spring -->
<logger name="org.springframework" level="warn"/>
<root level="info">
<appender-ref ref="console"/>
</root>
<!-- System operation logs -->
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>

View File

@ -0,0 +1,81 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file location -->
<property name="log.path" value="logs/cloud-electronic"/>
<!-- Log output patterns -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<property name="log.sky.pattern" value="%d{HH:mm:ss.SSS} %yellow([%tid]) [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.sky.pattern}</pattern>
</encoder>
</appender>
<!-- System log output -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>INFO</level>
<!-- On match: accept (log the event) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (drop the event) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>ERROR</level>
<!-- On match: accept (log the event) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (drop the event) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Send logs to the SkyWalking server over gRPC -->
<appender name="GRPC_LOG" class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.log.GRPCLogClientAppender">
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.TraceIdPatternLogbackLayout">
<Pattern>${log.sky.pattern}</Pattern>
</layout>
</encoder>
</appender>
<!-- Log level for system modules -->
<logger name="com.muyu" level="info"/>
<!-- Log level for Spring -->
<logger name="org.springframework" level="warn"/>
<root level="info">
<appender-ref ref="GRPC_LOG"/>
<appender-ref ref="console"/>
</root>
<!-- System operation logs -->
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>

View File

@ -0,0 +1,81 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file location -->
<property name="log.path" value="logs/cloud-electronic"/>
<!-- Log output patterns -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<property name="log.sky.pattern" value="%d{HH:mm:ss.SSS} %yellow([%tid]) [%thread] %-5level %logger{20} - [%method,%line] - %msg%n"/>
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.sky.pattern}</pattern>
</encoder>
</appender>
<!-- System log output -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>INFO</level>
<!-- On match: accept (log the event) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (drop the event) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<!-- Rolling policy: create log files based on time -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Log file name pattern -->
<fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- Keep at most 60 days of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- Level to filter on -->
<level>ERROR</level>
<!-- On match: accept (log the event) -->
<onMatch>ACCEPT</onMatch>
<!-- On mismatch: deny (drop the event) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Send logs to the SkyWalking server over gRPC -->
<appender name="GRPC_LOG" class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.log.GRPCLogClientAppender">
<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<layout class="org.apache.skywalking.apm.toolkit.log.logback.v1.x.TraceIdPatternLogbackLayout">
<Pattern>${log.sky.pattern}</Pattern>
</layout>
</encoder>
</appender>
<!-- Log level for system modules -->
<logger name="com.muyu" level="info"/>
<!-- Log level for Spring -->
<logger name="org.springframework" level="warn"/>
<root level="info">
<appender-ref ref="GRPC_LOG"/>
<appender-ref ref="console"/>
</root>
<!-- System operation logs -->
<root level="info">
<appender-ref ref="file_info"/>
<appender-ref ref="file_error"/>
</root>
</configuration>

View File

@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<!-- In MyBatis the namespace must be the fully qualified name of the corresponding mapper interface; it is what links this mapper.xml file to that interface (the file name and location themselves do not matter). -->
<mapper namespace="com.muyu.mapper.CarMessagePlusMapper">
<select id="selectCarMessageList" resultType="com.muyu.domain.CarMessage">
select * from `car_message` where car_message_cartype = #{carMessageCartype}
</select>
</mapper>
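For reference, the mapper interface implied by this namespace and statement id would look roughly like the sketch below; the real CarMessagePlusMapper lives elsewhere in the repository and may differ:

// Sketch of the mapper interface implied by the XML above; the signature mirrors the <select> id and parameter.
package com.muyu.mapper;

import com.muyu.domain.CarMessage;
import java.util.List;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

@Mapper  // optional if the project registers mappers via @MapperScan
public interface CarMessagePlusMapper {

    /** Runs the selectCarMessageList statement defined in CarMessagePlusMapper.xml. */
    List<CarMessage> selectCarMessageList(@Param("carMessageCartype") Integer carMessageCartype);
}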

View File

@ -7,7 +7,7 @@ nacos:
addr: 159.75.188.178:8848
user-name: nacos
password: nacos
namespace: eight
namespace: xxy
# SPRING_AMQP_DESERIALIZATION_TRUST_ALL=true spring.amqp.deserialization.trust.all
# Spring
spring:

View File

@ -14,6 +14,7 @@
<module>cloud-modules-gen</module>
<module>cloud-modules-file</module>
<module>cloud-modules-carData</module>
<module>cloud-modules-protocolparsing</module>
</modules>
<artifactId>cloud-modules</artifactId>

View File

@ -7,7 +7,7 @@ nacos:
addr: 159.75.188.178:8848
user-name: nacos
password: nacos
namespace: eight
namespace: xxy
# Spring
spring:

10
pom.xml
View File

@ -10,7 +10,7 @@
<name>muyu</name>
<url>http://www.muyu.vip</url>
<description>RuoYi microservice system</description>
<description>Intelligent connected-vehicle system</description>
<properties>
<muyu.version>3.6.3</muyu.version>
@ -42,6 +42,7 @@
<hutool.version>5.8.27</hutool.version>
<knife4j-openapi3.version>4.1.0</knife4j-openapi3.version>
<xxl-job-core.version>2.4.1</xxl-job-core.version>
<kafka.version>3.6.3</kafka.version>
</properties>
<!-- Dependency declarations -->
@ -218,6 +219,13 @@
<version>${muyu.version}</version>
</dependency>
<!--kafka-->
<dependency>
<groupId>com.muyu</groupId>
<artifactId>cloud-common-kafka</artifactId>
<version>${kafka.version}</version>
</dependency>
<!-- Distributed transactions -->
<dependency>
<groupId>com.muyu</groupId>