cjl
2023-07-10 626b1c85571181031f62cdf24a9392f4dea68c82
chore: 提交测试 (commit test)
5 files modified, 410 lines changed
screen-api/src/main/resources/application-dev.yml  2 lines changed
screen-manage/src/main/java/com/moral/api/config/kafka/KafkaConsumerConfig.java  2 lines changed
screen-manage/src/main/java/com/moral/api/config/kafka/KafkaProducerConfig.java  2 lines changed
screen-manage/src/main/java/com/moral/api/kafka/consumer/DeviceConsumer.java  398 lines changed
screen-manage/src/main/resources/application-qa.yml  6 lines changed
screen-api/src/main/resources/application-dev.yml
@@ -54,7 +54,7 @@
    filters: stat
    type: com.alibaba.druid.pool.DruidDataSource
    max-wait: 60000
-    url: jdbc:mysql://rm-bp1pr3rx9m3fnkwsks.mysql.rds.aliyuncs.com:3306/moral?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=Asia/Shanghai
+    url: jdbc:mysql://rm-bp1pr3rx9m3fnkwsk.mysql.rds.aliyuncs.com:3306/moral?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=Asia/Shanghai
    username: moral_qx
    password: moral_qx_12345
    test-on-borrow: false
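
The only functional change in this file is the RDS hostname inside the JDBC url (the second line above is the new value). A throwaway connectivity check against the new endpoint, assuming the MySQL Connector/J driver is on the classpath and the credentials shown above are still valid; DevDbSmokeTest is an illustrative name, not part of the repo:

// Minimal sketch: verify the new RDS endpoint accepts connections with the
// credentials from application-dev.yml. Not part of the commit.
import java.sql.Connection;
import java.sql.DriverManager;

public class DevDbSmokeTest {
    public static void main(String[] args) throws Exception {
        String url = "jdbc:mysql://rm-bp1pr3rx9m3fnkwsk.mysql.rds.aliyuncs.com:3306/moral"
                + "?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=Asia/Shanghai";
        try (Connection conn = DriverManager.getConnection(url, "moral_qx", "moral_qx_12345")) {
            System.out.println("connected: " + conn.getMetaData().getURL());
        }
    }
}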
screen-manage/src/main/java/com/moral/api/config/kafka/KafkaConsumerConfig.java
@@ -19,7 +19,7 @@
@Configuration
@EnableKafka
public class KafkaConsumerConfig {
    @Value("${kafka.consumer.servers}")
    @Value("47.99.145.48:9092,47.111.116.38:9092,47.111.124.159:9092")
    private String servers;
    @Value("${kafka.consumer.enable.auto.commit}")
    private boolean enableAutoCommit;
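
The edit swaps the ${kafka.consumer.servers} placeholder for a literal broker list inside @Value; because the string contains no ${...} expression, Spring injects it verbatim, bypassing the application-*.yml profiles. A minimal sketch of the wiring such a field typically feeds, assuming spring-kafka; the sketch class name and the MANUAL_IMMEDIATE ack mode are assumptions (the ack mode is chosen to match the Acknowledgment.acknowledge() calls in DeviceConsumer below), and the group id is taken from application-qa.yml:

// Minimal sketch, not the project's full config: how a hard-coded bootstrap
// list is typically wired into a listener container factory with spring-kafka.
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;

@Configuration
@EnableKafka
public class KafkaConsumerConfigSketch {

    // Injected as a literal: the value is not a ${...} placeholder, so Spring
    // passes the string through unchanged.
    @Value("47.99.145.48:9092,47.111.116.38:9092,47.111.124.159:9092")
    private String servers;

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> insertListenerContainerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "Dev_insert"); // groupId.insert from application-qa.yml
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(props));
        // Manual acks to match the Acknowledgment parameter used by the listeners.
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }
}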
screen-manage/src/main/java/com/moral/api/config/kafka/KafkaProducerConfig.java
@@ -16,7 +16,7 @@
/*@Configuration
@EnableKafka*/
public class KafkaProducerConfig {
    @Value("${kafka.producer.servers}")
    @Value("47.99.145.48:9092,47.111.116.38:9092,47.111.124.159:9092")
    private String servers;
    @Value("${kafka.producer.retries}")
    private int retries;
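
KafkaProducerConfig receives the same hard-coded broker list, but the class is still disabled (/*@Configuration @EnableKafka*/ above), so this change has no runtime effect until those annotations are restored. For reference, a minimal sketch of the producer wiring these values would feed once re-enabled, assuming spring-kafka; the class name and serializer choices are illustrative:

// Minimal producer-side counterpart, for reference only; not part of the commit.
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;

public class KafkaProducerConfigSketch {
    public KafkaTemplate<String, String> kafkaTemplate() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "47.99.145.48:9092,47.111.116.38:9092,47.111.124.159:9092");
        props.put(ProducerConfig.RETRIES_CONFIG, 0); // matches kafka.producer.retries in application-qa.yml
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new KafkaTemplate<>(new DefaultKafkaProducerFactory<>(props));
    }
}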
screen-manage/src/main/java/com/moral/api/kafka/consumer/DeviceConsumer.java
@@ -1,201 +1,201 @@
//package com.moral.api.kafka.consumer;
//
//import com.moral.api.service.*;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.kafka.clients.consumer.ConsumerRecord;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.data.redis.core.RedisTemplate;
//import org.springframework.kafka.annotation.KafkaListener;
//import org.springframework.kafka.support.Acknowledgment;
//import org.springframework.stereotype.Component;
//import org.springframework.util.ObjectUtils;
//
//import java.util.HashMap;
//import java.util.Iterator;
//import java.util.Map;
//
//import com.alibaba.fastjson.JSON;
//import com.moral.constant.KafkaConstants;
//import com.moral.constant.RedisConstants;
//
//
//
//@Component
//@Slf4j
//public class DeviceConsumer {
//
//    @Autowired
//    private HistoryMinutelyService historyMinutelyService;
//
//    @Autowired
//    private HistoryHourlyService historyHourlyService;
//
//    @Autowired
//    private DeviceService deviceService;
//
//    @Autowired
//    private RedisTemplate redisTemplate;
//
//    @Autowired
//    private HistorySecondCruiserService historySecondCruiserService;
//
//    @Autowired
//    private HistorySecondUavService historySecondUavService;
//
//    //分钟数据
//    @KafkaListener(topics = KafkaConstants.TOPIC_MINUTE, containerFactory = "insertListenerContainerFactory")
//    public void listenMinute(ConsumerRecord<String, String> record, Acknowledgment ack) {
//        String msg = record.value();
//        try {
//            Map<String, Object> data = JSON.parseObject(msg, Map.class);
//            Object mac = data.get("mac");
//            Object time = data.get("DataTime");
//            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
//                log.warn("some properties is null, param{}", msg);
//                ack.acknowledge();
//                return;
//            }
//
//            //数据过滤
////            data.remove("time");
//            data.remove("entryTime");
//            Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
//            Map<String, Object> newMap = new HashMap<>();
//            Map.Entry<String, Object> next;
//            while (iterator.hasNext()) {
//                next = iterator.next();
//                String key = next.getKey();
//                Object value = next.getValue();
//                if (key.contains("-Avg")) {
//                    newMap.put(key.replaceAll("-Avg", ""), Double.parseDouble(value.toString()));
//                } else {
//                    newMap.put(key, value);
//                }
//                iterator.remove();
//            }
//            //存入数据库
//            historyMinutelyService.insertHistoryMinutely(newMap);
//            ack.acknowledge();
//        } catch (Exception e) {
//            log.error("param{}" + msg);
//        }
//    }
//
//    //小时数据
//    @KafkaListener(topics = KafkaConstants.TOPIC_HOUR, containerFactory = "insertListenerContainerFactory")
//    public void listenHour(ConsumerRecord<String, String> record, Acknowledgment ack) {
//        String msg = record.value();
//        try {
//            Map<String, Object> data = JSON.parseObject(msg, Map.class);
//            Object mac = data.get("mac");
//            Object time = data.get("DataTime");
//            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
//                log.warn("some properties is null, param{}", msg);
//                ack.acknowledge();
//                return;
//            }
//
//            //数据过滤
////            data.remove("time");
//            data.remove("entryTime");
//            Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
//            Map<String, Object> newMap = new HashMap<>();
//            Map.Entry<String, Object> next;
//            while (iterator.hasNext()) {
//                next = iterator.next();
//                String key = next.getKey();
//                Object value = next.getValue();
//                if (key.contains("-Avg")) {
//                    newMap.put(key.replaceAll("-Avg", ""), Double.parseDouble(value.toString()));
//                } else {
//                    newMap.put(key, value);
//                }
//                iterator.remove();
//            }
//            //存入数据库
//            historyHourlyService.insertHistoryHourly(newMap);
//            ack.acknowledge();
//        } catch (Exception e) {
//            log.error("param{}" + msg);
//        }
//    }
//
//    //秒数据,修改设备状态,缓存最新秒数据
//    @KafkaListener(topics = KafkaConstants.TOPIC_SECOND, containerFactory = "stateListenerContainerFactory")
//    public void listenSecond(ConsumerRecord<String, String> record) {
//        String msg = record.value();
//        try {
//            Map<String, Object> data = JSON.parseObject(msg, Map.class);
//            Object mac = data.get("mac");
//            Object time = data.get("DataTime");
//            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
//                log.warn("some properties is null, param{}", msg);
//                return;
//            }
//            //数据过滤
package com.moral.api.kafka.consumer;
import com.moral.api.service.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;
import org.springframework.util.ObjectUtils;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import com.alibaba.fastjson.JSON;
import com.moral.constant.KafkaConstants;
import com.moral.constant.RedisConstants;
@Component
@Slf4j
public class DeviceConsumer {
    @Autowired
    private HistoryMinutelyService historyMinutelyService;
    @Autowired
    private HistoryHourlyService historyHourlyService;
    @Autowired
    private DeviceService deviceService;
    @Autowired
    private RedisTemplate redisTemplate;
    @Autowired
    private HistorySecondCruiserService historySecondCruiserService;
    @Autowired
    private HistorySecondUavService historySecondUavService;
    //分钟数据
    @KafkaListener(topics = KafkaConstants.TOPIC_MINUTE, containerFactory = "insertListenerContainerFactory")
    public void listenMinute(ConsumerRecord<String, String> record, Acknowledgment ack) {
        String msg = record.value();
        try {
            Map<String, Object> data = JSON.parseObject(msg, Map.class);
            Object mac = data.get("mac");
            Object time = data.get("DataTime");
            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                log.warn("some properties is null, param{}", msg);
                ack.acknowledge();
                return;
            }
            //数据过滤
//            data.remove("time");
//            data.remove("entryTime");
//
//            //数据校准
//            data = deviceService.adjustDeviceData(data,"0");
//            //存入redis
//            data.put("DataTime", time);
//            redisTemplate.opsForHash().put(RedisConstants.DATA_SECOND, mac, data);
//            //判断并修改设备状态
//            data.put("mac", mac);
//            deviceService.judgeDeviceState(data);
//        } catch (Exception e) {
//            log.error("param{}" + msg);
//        }
//    }
//
//    //无人机秒数据
//    @KafkaListener(topics = KafkaConstants.UAV_TOPIC_SECOND, groupId = KafkaConstants.GROUP_INSERT, containerFactory = "insertListenerContainerFactory")
//    public void listenSecondSpecial(ConsumerRecord<String, String> record, Acknowledgment ack) {
//        String msg = record.value();
//        try {
//            Map<String, Object> data = JSON.parseObject(msg, Map.class);
//            Object mac = data.get("mac");
//            Object time = data.get("DataTime");
//            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
//                log.warn("some properties is null, param{}", msg);
//                ack.acknowledge();
//                return;
//            }
//
//            //数据过滤
            data.remove("entryTime");
            Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
            Map<String, Object> newMap = new HashMap<>();
            Map.Entry<String, Object> next;
            while (iterator.hasNext()) {
                next = iterator.next();
                String key = next.getKey();
                Object value = next.getValue();
                if (key.contains("-Avg")) {
                    newMap.put(key.replaceAll("-Avg", ""), Double.parseDouble(value.toString()));
                } else {
                    newMap.put(key, value);
                }
                iterator.remove();
            }
            //存入数据库
            historyMinutelyService.insertHistoryMinutely(newMap);
            ack.acknowledge();
        } catch (Exception e) {
            log.error("param{}" + msg);
        }
    }
    //小时数据
    @KafkaListener(topics = KafkaConstants.TOPIC_HOUR, containerFactory = "insertListenerContainerFactory")
    public void listenHour(ConsumerRecord<String, String> record, Acknowledgment ack) {
        String msg = record.value();
        try {
            Map<String, Object> data = JSON.parseObject(msg, Map.class);
            Object mac = data.get("mac");
            Object time = data.get("DataTime");
            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                log.warn("some properties is null, param{}", msg);
                ack.acknowledge();
                return;
            }
            //数据过滤
//            data.remove("time");
//            data.remove("entryTime");
//
//            historySecondUavService.insertHistorySecond(data);
//            ack.acknowledge();
//        } catch (Exception e) {
//            log.error("param{}" + msg);
//        }
//    }
//
//    //走航车秒数据
//    @KafkaListener(topics = KafkaConstants.CRUISER_TOPIC_SECOND, containerFactory = "insertListenerContainerFactory")
//    public void listenSecondCruiser(ConsumerRecord<String, String> record, Acknowledgment ack) {
//        String msg = record.value();
//        try {
//            Map<String, Object> data = JSON.parseObject(msg, Map.class);
//            Object mac = data.get("mac");
//            Object time = data.get("DataTime");
//            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
//                log.warn("some properties is null, param{}", msg);
//                ack.acknowledge();
//                return;
//            }
//
//            //数据过滤
//            data.remove("time");
//            data.remove("entryTime");
//
//            historySecondCruiserService.insertHistorySecond(data);
//            ack.acknowledge();
//        } catch (Exception e) {
//            log.error("param{}" + e);
//        }
//    }
//}
            data.remove("entryTime");
            Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
            Map<String, Object> newMap = new HashMap<>();
            Map.Entry<String, Object> next;
            while (iterator.hasNext()) {
                next = iterator.next();
                String key = next.getKey();
                Object value = next.getValue();
                if (key.contains("-Avg")) {
                    newMap.put(key.replaceAll("-Avg", ""), Double.parseDouble(value.toString()));
                } else {
                    newMap.put(key, value);
                }
                iterator.remove();
            }
            //存入数据库
            historyHourlyService.insertHistoryHourly(newMap);
            ack.acknowledge();
        } catch (Exception e) {
            log.error("param{}" + msg);
        }
    }
    //秒数据,修改设备状态,缓存最新秒数据
    @KafkaListener(topics = KafkaConstants.TOPIC_SECOND, containerFactory = "stateListenerContainerFactory")
    public void listenSecond(ConsumerRecord<String, String> record) {
        String msg = record.value();
        try {
            Map<String, Object> data = JSON.parseObject(msg, Map.class);
            Object mac = data.get("mac");
            Object time = data.get("DataTime");
            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                log.warn("some properties is null, param{}", msg);
                return;
            }
            //数据过滤
            data.remove("time");
            data.remove("entryTime");
            //数据校准
            data = deviceService.adjustDeviceData(data,"0");
            //存入redis
            data.put("DataTime", time);
            redisTemplate.opsForHash().put(RedisConstants.DATA_SECOND, mac, data);
            //判断并修改设备状态
            data.put("mac", mac);
            deviceService.judgeDeviceState(data);
        } catch (Exception e) {
            log.error("param{}" + msg);
        }
    }
    //无人机秒数据
    @KafkaListener(topics = KafkaConstants.UAV_TOPIC_SECOND, groupId = KafkaConstants.GROUP_INSERT, containerFactory = "insertListenerContainerFactory")
    public void listenSecondSpecial(ConsumerRecord<String, String> record, Acknowledgment ack) {
        String msg = record.value();
        try {
            Map<String, Object> data = JSON.parseObject(msg, Map.class);
            Object mac = data.get("mac");
            Object time = data.get("DataTime");
            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                log.warn("some properties is null, param{}", msg);
                ack.acknowledge();
                return;
            }
            //数据过滤
            data.remove("time");
            data.remove("entryTime");
            historySecondUavService.insertHistorySecond(data);
            ack.acknowledge();
        } catch (Exception e) {
            log.error("param{}" + msg);
        }
    }
    //走航车秒数据
    @KafkaListener(topics = KafkaConstants.CRUISER_TOPIC_SECOND, containerFactory = "insertListenerContainerFactory")
    public void listenSecondCruiser(ConsumerRecord<String, String> record, Acknowledgment ack) {
        String msg = record.value();
        try {
            Map<String, Object> data = JSON.parseObject(msg, Map.class);
            Object mac = data.get("mac");
            Object time = data.get("DataTime");
            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                log.warn("some properties is null, param{}", msg);
                ack.acknowledge();
                return;
            }
            //数据过滤
            data.remove("time");
            data.remove("entryTime");
            historySecondCruiserService.insertHistorySecond(data);
            ack.acknowledge();
        } catch (Exception e) {
            log.error("param{}" + e);
        }
    }
}
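
The listenMinute and listenHour listeners re-enabled above share the same pre-processing: drop entryTime, rename any key containing "-Avg", and parse its value to a double before handing the map to the history service. A standalone sketch of that step with an illustrative payload (the mac and sensor code below are made up, not taken from the commit):

// Standalone sketch of the key-normalization step used by listenMinute/listenHour.
import java.util.HashMap;
import java.util.Map;

public class AvgKeyNormalizer {
    static Map<String, Object> normalize(Map<String, Object> data) {
        Map<String, Object> newMap = new HashMap<>();
        for (Map.Entry<String, Object> entry : data.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            if (key.contains("-Avg")) {
                // "-Avg" keys are renamed and their values parsed to double
                newMap.put(key.replace("-Avg", ""), Double.parseDouble(value.toString()));
            } else {
                newMap.put(key, value);
            }
        }
        return newMap;
    }

    public static void main(String[] args) {
        Map<String, Object> data = new HashMap<>();
        data.put("mac", "p5dnd7a0245555");        // illustrative device mac
        data.put("DataTime", "2023-07-10 10:01:00");
        data.put("a34004-Avg", "35.6");           // illustrative sensor code
        System.out.println(normalize(data));      // {DataTime=..., a34004=35.6, mac=...}
    }
}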
screen-manage/src/main/resources/application-qa.yml
@@ -92,11 +92,11 @@
    enable:
      auto:
        commit: false
-    servers: 172.16.44.65:9092,172.16.44.67:9092,172.16.44.66:9092
+    servers: 47.99.145.48:9092,47.111.116.38:9092,47.111.124.159:9092
    session:
      timeout: 6000
    zookeeper:
-      connect: 172.16.44.65:2181,172.16.44.67:2181,172.16.44.66:2181
+      connect: 47.99.145.48:2181,47.111.116.38:2181,47.111.124.159:2181
  producer:
    batch:
      size: 4096
@@ -104,7 +104,7 @@
      memory: 40960
    linger: 1
    retries: 0
-    servers: 172.16.44.65:9092,172.16.44.67:9092,172.16.44.66:9092
+    servers: 47.99.145.48:9092,47.111.116.38:9092,47.111.124.159:9092
  groupId:
    insert: Dev_insert
    state: Dev_state
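
The QA profile now points kafka.consumer.servers, zookeeper.connect, and kafka.producer.servers at the same public addresses that were hard-coded into the @Value annotations above. A quick reachability check against that broker list using the Kafka AdminClient; purely illustrative and not part of the commit:

// Lists topics on the new brokers to confirm they are reachable from the QA host.
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class BrokerSmokeTest {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
                "47.99.145.48:9092,47.111.116.38:9092,47.111.124.159:9092");
        try (AdminClient admin = AdminClient.create(props)) {
            System.out.println("topics: " + admin.listTopics().names().get());
        }
    }
}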