From 6f4e852b84c577454a4876f83c7085bd360fe4fb Mon Sep 17 00:00:00 2001
From: jinpengyong <jpy123456>
Date: Tue, 17 Aug 2021 17:07:02 +0800
Subject: [PATCH] 特殊设备数据insert

---
 screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java |  129 ++++++++++++++++++++++++++++++------------
 1 file changed, 91 insertions(+), 38 deletions(-)

diff --git a/screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java b/screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java
index cc98e98..04e47cb 100644
--- a/screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java
+++ b/screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java
@@ -7,21 +7,24 @@
 import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.support.Acknowledgment;
 import org.springframework.stereotype.Component;
-import org.springframework.util.StringUtils;
+import org.springframework.util.ObjectUtils;
 
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.Map;
-import java.util.stream.Collectors;
 
 import com.alibaba.fastjson.JSON;
 import com.moral.api.service.DeviceService;
 import com.moral.api.service.HistoryHourlyService;
 import com.moral.api.service.HistoryMinutelyService;
-import com.moral.api.util.AdjustDataUtils;
+import com.moral.api.service.HistorySecondSpecialService;
 import com.moral.constant.KafkaConstants;
 import com.moral.constant.RedisConstants;
 
-//@Component
+/*
+ * Kafka consumer (re-enabled). Original comment was mis-encoded; presumably it noted enabling the consumer component.
+ * */
+@Component
 @Slf4j
 public class KafkaConsumer {
 
@@ -35,94 +38,144 @@
     private DeviceService deviceService;
 
     @Autowired
-    private AdjustDataUtils adjustDataUtils;
-
-    @Autowired
     private RedisTemplate redisTemplate;
 
+    @Autowired
+    private HistorySecondSpecialService historySecondSpecialService;
+
     //������������
-    @KafkaListener(topics = KafkaConstants.TOPIC_MINUTE, groupId = KafkaConstants.GROUP_ID_INSERT, containerFactory = "kafkaListenerContainerFactory")
+    @KafkaListener(topics = KafkaConstants.TOPIC_MINUTE, groupId = KafkaConstants.GROUP_INSERT, containerFactory = "kafkaListenerContainerFactory")
     public void listenMinute(ConsumerRecord<String, String> record, Acknowledgment ack) {
         String msg = record.value();
         try {
-            Map<String, Object> data = JSON.parseObject(msg, HashMap.class);
+            Map<String, Object> data = JSON.parseObject(msg, Map.class);
             Object mac = data.get("mac");
             Object time = data.get("DataTime");
             Object ver = data.get("ver");
-            if (StringUtils.isEmpty(ver) || StringUtils.isEmpty(time) || StringUtils.isEmpty(mac)) {
+            if (ObjectUtils.isEmpty(ver) || ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                 log.warn("some properties is null, param{}", msg);
                 ack.acknowledge();
                 return;
             }
 
             //������������
-            data = data.entrySet().stream()
-                    .filter(map -> {
-                        String key = map.getKey();
-                        return !(key.contains("Min") || key.contains("Max") || key.contains("Cou"));
-                    }).collect(Collectors.toMap(m -> m.getKey().replaceAll("-Avg", ""), Map.Entry::getValue));
             data.remove("time");
+            data.remove("entryTime");
+            Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
+            Map<String, Object> newMap = new HashMap<>();
+            Map.Entry<String, Object> next;
+            while (iterator.hasNext()) {
+                next = iterator.next();
+                String key = next.getKey();
+                Object value = next.getValue();
+                if (key.contains("-Avg")) {
+                    newMap.put(key.replaceAll("-Avg", ""), Double.parseDouble(value.toString()));
+                } else {
+                    newMap.put(key, value);
+                }
+                iterator.remove();
+            }
             //���������������
-            historyMinutelyService.insertHistoryMinutely(data);
+            historyMinutelyService.insertHistoryMinutely(newMap);
             ack.acknowledge();
         } catch (Exception e) {
-            //log.error("param{}" + msg);
+            log.error("param{}" + msg);
         }
     }
 
     //������������
-    @KafkaListener(topics = KafkaConstants.TOPIC_HOUR, groupId = KafkaConstants.GROUP_ID_INSERT, containerFactory = "kafkaListenerContainerFactory")
+    @KafkaListener(topics = KafkaConstants.TOPIC_HOUR, groupId = KafkaConstants.GROUP_INSERT, containerFactory = "kafkaListenerContainerFactory")
     public void listenHour(ConsumerRecord<String, String> record, Acknowledgment ack) {
         String msg = record.value();
         try {
-            Map<String, Object> data = JSON.parseObject(msg, HashMap.class);
+            Map<String, Object> data = JSON.parseObject(msg, Map.class);
             Object mac = data.get("mac");
             Object time = data.get("DataTime");
             Object ver = data.get("ver");
-            if (StringUtils.isEmpty(ver) || StringUtils.isEmpty(time) || StringUtils.isEmpty(mac)) {
+            if (ObjectUtils.isEmpty(ver) || ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                 log.warn("some properties is null, param{}", msg);
                 ack.acknowledge();
                 return;
             }
 
             //������������
-            data = data.entrySet().stream()
-                    .filter(map -> {
-                        String key = map.getKey();
-                        return !(key.contains("Min") || key.contains("Max") || key.contains("Cou"));
-                    }).collect(Collectors.toMap(m -> m.getKey().replaceAll("-Avg", ""), Map.Entry::getValue));
             data.remove("time");
+            data.remove("entryTime");
+            Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
+            Map<String, Object> newMap = new HashMap<>();
+            Map.Entry<String, Object> next;
+            while (iterator.hasNext()) {
+                next = iterator.next();
+                String key = next.getKey();
+                Object value = next.getValue();
+                if (key.contains("-Avg")) {
+                    newMap.put(key.replaceAll("-Avg", ""), Double.parseDouble(value.toString()));
+                } else {
+                    newMap.put(key, value);
+                }
+                iterator.remove();
+            }
             //���������������
-            historyHourlyService.insertHistoryHourly(data);
+            historyHourlyService.insertHistoryHourly(newMap);
             ack.acknowledge();
         } catch (Exception e) {
-            //log.error("param{}" + msg);
+            log.error("param{}" + msg);
         }
     }
 
     //������������������������������������������������������
-    @KafkaListener(topics = KafkaConstants.TOPIC_SECOND, groupId = KafkaConstants.GROUP_ID_STATE, containerFactory = "kafkaListenerContainerFactory")
-    public void listenSecond(ConsumerRecord<String, String> record, Acknowledgment ack) {
+    @KafkaListener(topics = KafkaConstants.TOPIC_SECOND, groupId = KafkaConstants.GROUP_STATE, containerFactory = "kafkaListenerContainerFactory")
+    public void listenSecond(ConsumerRecord<String, String> record) {
         String msg = record.value();
         try {
-            Map<String, Object> data = JSON.parseObject(msg, HashMap.class);
+            Map<String, Object> data = JSON.parseObject(msg, Map.class);
             Object mac = data.get("mac");
             Object time = data.get("DataTime");
             Object ver = data.get("ver");
-            if (StringUtils.isEmpty(ver) || StringUtils.isEmpty(time) || StringUtils.isEmpty(mac)) {
+            if (ObjectUtils.isEmpty(ver) || ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
+                log.warn("some properties is null, param{}", msg);
+                return;
+            }
+            //remove non-measurement fields
+            data.remove("time");
+            data.remove("entryTime");
+            data.remove("ver");
+
+            //adjust (calibrate) the device data
+            data = deviceService.adjustDeviceData(data);
+            //cache latest data in redis
+            redisTemplate.opsForHash().put(RedisConstants.DATA_SECOND, mac, data);
+            //judge device state from the latest data
+            deviceService.judgeDeviceState(data);
+        } catch (Exception e) {
+            log.error("param{}" + msg);
+        }
+    }
+
+    //special device second-level data insert
+    @KafkaListener(topics = KafkaConstants.TOPIC_SECOND_SPECIAL, groupId = KafkaConstants.GROUP_INSERT, containerFactory = "kafkaListenerContainerFactory")
+    public void listenSecondSpecial(ConsumerRecord<String, String> record, Acknowledgment ack) {
+        String msg = record.value();
+        try {
+            Map<String, Object> data = JSON.parseObject(msg, Map.class);
+            Object mac = data.get("mac");
+            Object time = data.get("DataTime");
+            Object ver = data.get("ver");
+            if (ObjectUtils.isEmpty(ver) || ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                 log.warn("some properties is null, param{}", msg);
                 ack.acknowledge();
                 return;
             }
-            //������������
-            data = adjustDataUtils.adjust(data);
-            //������redis
-            redisTemplate.opsForValue().set(RedisConstants.DEVICE_DATA + "_" + mac, data);
-            //���������������������������
-            deviceService.judgeDeviceState(data);
+
+            //remove non-measurement fields
+            data.remove("time");
+            data.remove("entryTime");
+            data.remove("ver");
+
+            historySecondSpecialService.insertHistorySecond(data);
             ack.acknowledge();
         } catch (Exception e) {
-            //log.error("param{}" + msg);
+            log.error("param{}" + msg);
         }
     }
 }

--
Gitblit v1.8.0