From 2818433841637ac5aaa8823f2904aec417ef72b7 Mon Sep 17 00:00:00 2001
From: jinpengyong <jpy123456>
Date: Thu, 26 Aug 2021 16:23:20 +0800
Subject: [PATCH] Kafka consumer group configuration

---
 screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java | 121 ++++++++++++++++++++++++++++------------
 1 files changed, 85 insertions(+), 36 deletions(-)

diff --git a/screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java b/screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java
index 94ec2f1..5301f10 100644
--- a/screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java
+++ b/screen-manage/src/main/java/com/moral/api/kafka/consumer/KafkaConsumer.java
@@ -7,19 +7,23 @@
 import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.support.Acknowledgment;
 import org.springframework.stereotype.Component;
-import org.springframework.util.StringUtils;
+import org.springframework.util.ObjectUtils;
 
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.Map;
-import java.util.stream.Collectors;
 
 import com.alibaba.fastjson.JSON;
 import com.moral.api.service.DeviceService;
 import com.moral.api.service.HistoryHourlyService;
 import com.moral.api.service.HistoryMinutelyService;
+import com.moral.api.service.HistorySecondSpecialService;
 import com.moral.constant.KafkaConstants;
 import com.moral.constant.RedisConstants;
 
+/*
+ * Kafka consumers for device data (minute, hour, second and special-second topics)
+ */
 @Component
 @Slf4j
 public class KafkaConsumer {
@@ -36,91 +40,136 @@
     @Autowired
     private RedisTemplate redisTemplate;
 
+    @Autowired
+    private HistorySecondSpecialService historySecondSpecialService;
+
     //minute data
-    @KafkaListener(topics = KafkaConstants.TOPIC_MINUTE, groupId = KafkaConstants.GROUP_ID_INSERT, containerFactory = "kafkaListenerContainerFactory")
+    @KafkaListener(topics = KafkaConstants.TOPIC_MINUTE, containerFactory = "insertListenerContainerFactory")
     public void listenMinute(ConsumerRecord<String, String> record, Acknowledgment ack) {
         String msg = record.value();
         try {
-            Map<String, Object> data = JSON.parseObject(msg, HashMap.class);
+            Map<String, Object> data = JSON.parseObject(msg, Map.class);
             Object mac = data.get("mac");
             Object time = data.get("DataTime");
-            Object ver = data.get("ver");
-            if (StringUtils.isEmpty(ver) || StringUtils.isEmpty(time) || StringUtils.isEmpty(mac)) {
+            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                 log.warn("some properties is null, param{}", msg);
                 ack.acknowledge();
                 return;
            }
             //filter data
-            data = data.entrySet().stream()
-                    .filter(map -> {
-                        String key = map.getKey();
-                        return !(key.contains("Min") || key.contains("Max") || key.contains("Cou"));
-                    }).collect(Collectors.toMap(m -> m.getKey().replaceAll("-Avg", ""), Map.Entry::getValue));
             data.remove("time");
             data.remove("entryTime");
+            Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
+            Map<String, Object> newMap = new HashMap<>();
+            Map.Entry<String, Object> next;
+            while (iterator.hasNext()) {
+                next = iterator.next();
+                String key = next.getKey();
+                Object value = next.getValue();
+                if (key.contains("-Avg")) {
+                    newMap.put(key.replaceAll("-Avg", ""), Double.parseDouble(value.toString()));
+                } else {
+                    newMap.put(key, value);
+                }
+                iterator.remove();
+            }
             //insert minute data
-            historyMinutelyService.insertHistoryMinutely(data);
+            historyMinutelyService.insertHistoryMinutely(newMap);
             ack.acknowledge();
         } catch (Exception e) {
-            //log.error("param{}" + msg);
+            log.error("param: {}", msg, e);
         }
     }
 
     //hour data
-    @KafkaListener(topics = KafkaConstants.TOPIC_HOUR, groupId = KafkaConstants.GROUP_ID_INSERT, containerFactory = "kafkaListenerContainerFactory")
+    @KafkaListener(topics = KafkaConstants.TOPIC_HOUR, containerFactory = "insertListenerContainerFactory")
     public void listenHour(ConsumerRecord<String, String> record, Acknowledgment ack) {
         String msg = record.value();
         try {
-            Map<String, Object> data = JSON.parseObject(msg, HashMap.class);
+            Map<String, Object> data = JSON.parseObject(msg, Map.class);
             Object mac = data.get("mac");
             Object time = data.get("DataTime");
-            Object ver = data.get("ver");
-            if (StringUtils.isEmpty(ver) || StringUtils.isEmpty(time) || StringUtils.isEmpty(mac)) {
+            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                 log.warn("some properties is null, param{}", msg);
                 ack.acknowledge();
                 return;
             }
             //filter data
-            data = data.entrySet().stream()
-                    .filter(map -> {
-                        String key = map.getKey();
-                        return !(key.contains("Min") || key.contains("Max") || key.contains("Cou"));
-                    }).collect(Collectors.toMap(m -> m.getKey().replaceAll("-Avg", ""), Map.Entry::getValue));
             data.remove("time");
             data.remove("entryTime");
+            Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
+            Map<String, Object> newMap = new HashMap<>();
+            Map.Entry<String, Object> next;
+            while (iterator.hasNext()) {
+                next = iterator.next();
+                String key = next.getKey();
+                Object value = next.getValue();
+                if (key.contains("-Avg")) {
+                    newMap.put(key.replaceAll("-Avg", ""), Double.parseDouble(value.toString()));
+                } else {
+                    newMap.put(key, value);
+                }
+                iterator.remove();
+            }
             //insert hour data
-            historyHourlyService.insertHistoryHourly(data);
+            historyHourlyService.insertHistoryHourly(newMap);
             ack.acknowledge();
         } catch (Exception e) {
-            //log.error("param{}" + msg);
+            log.error("param: {}", msg, e);
         }
     }
 
     //second data, used for device state judgement and real-time display
-    @KafkaListener(topics = KafkaConstants.TOPIC_SECOND, groupId = KafkaConstants.GROUP_ID_STATE, containerFactory = "kafkaListenerContainerFactory")
-    public void listenSecond(ConsumerRecord<String, String> record, Acknowledgment ack) {
+    @KafkaListener(topics = KafkaConstants.TOPIC_SECOND, containerFactory = "stateListenerContainerFactory")
+    public void listenSecond(ConsumerRecord<String, String> record) {
         String msg = record.value();
         try {
-            Map<String, Object> data = JSON.parseObject(msg, HashMap.class);
+            Map<String, Object> data = JSON.parseObject(msg, Map.class);
             Object mac = data.get("mac");
             Object time = data.get("DataTime");
-            Object ver = data.get("ver");
-            if (StringUtils.isEmpty(ver) || StringUtils.isEmpty(time) || StringUtils.isEmpty(mac)) {
+            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
+                log.warn("some properties is null, param{}", msg);
+                return;
+            }
+            //filter data
+            data.remove("time");
+            data.remove("entryTime");
+
+            //adjust (calibrate) device data
+            data = deviceService.adjustDeviceData(data);
+            //cache latest reading in redis
+            redisTemplate.opsForHash().put(RedisConstants.DATA_SECOND, mac, data);
+            //judge device state
+            deviceService.judgeDeviceState(data);
+        } catch (Exception e) {
+            log.error("param: {}", msg, e);
+        }
+    }
+
+    //second data from special devices
+    @KafkaListener(topics = KafkaConstants.TOPIC_SECOND_SPECIAL, containerFactory = "insertListenerContainerFactory")
+    public void listenSecondSpecial(ConsumerRecord<String, String> record, Acknowledgment ack) {
+        String msg = record.value();
+        try {
+            Map<String, Object> data = JSON.parseObject(msg, Map.class);
+            Object mac = data.get("mac");
+            Object time = data.get("DataTime");
+            if (ObjectUtils.isEmpty(time) || ObjectUtils.isEmpty(mac)) {
                 log.warn("some properties is null, param{}", msg);
                 ack.acknowledge();
                 return;
             }
-            //adjust (calibrate) device data
-            data = deviceService.adjustDeviceData(data);
-            //store in redis
-            redisTemplate.opsForValue().set(RedisConstants.DEVICE_DATA + mac, data);
-            //judge device state
-            deviceService.judgeDeviceState(data);
+
+            //filter data
+            data.remove("time");
+            data.remove("entryTime");
+
+            historySecondSpecialService.insertHistorySecond(data);
             ack.acknowledge();
         } catch (Exception e) {
-            //log.error("param{}" + msg);
+            log.error("param: {}", msg, e);
         }
     }
 }
--
Gitblit v1.8.0
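
Note on the container factories referenced above: the patch drops the inline groupId (KafkaConstants.GROUP_ID_INSERT / GROUP_ID_STATE) from each @KafkaListener and instead points the listeners at named factories, insertListenerContainerFactory and stateListenerContainerFactory, which must be defined in a separate Kafka configuration class that is not part of this diff. The sketch below shows one way such a configuration could look, assuming Spring Kafka; only the two bean names are taken from the annotations above, while the class name, group ids, bootstrap servers and ack modes are illustrative assumptions.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;

// Hypothetical configuration class; not part of this patch.
@Configuration
public class KafkaConsumerConfig {

    // Shared consumer properties; bootstrap servers and group ids would normally
    // come from application.yml rather than being hard-coded.
    private Map<String, Object> consumerProps(String groupId, boolean autoCommit) {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumption
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);                   // assumption
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }

    // Used by the minute, hour and special-second listeners: manual acks, so an
    // offset is committed only after ack.acknowledge() follows a successful insert.
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> insertListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerProps("insert-group", false)));
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }

    // Used by the second-data state listener, which takes no Acknowledgment
    // parameter, so auto-committed offsets are assumed here.
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> stateListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerProps("state-group", true)));
        return factory;
    }
}

With MANUAL_IMMEDIATE acks the insert listeners only commit an offset after ack.acknowledge() runs, which matches the acknowledge-after-insert pattern in KafkaConsumer; the state listener simply lets the container commit on its own schedule.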