[monitor]优化去除Kafka etcd依赖

This commit is contained in:
tomsun28
2022-01-29 17:11:22 +08:00
parent 6f8e400cab
commit 739dcd6308
60 changed files with 3136 additions and 707 deletions

View File

@@ -1,7 +1,6 @@
package com.usthe.alert;
import com.usthe.alert.pojo.entity.Alert;
import com.usthe.common.entity.message.CollectRep;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -17,22 +16,12 @@ import java.util.concurrent.TimeUnit;
@Slf4j
public class AlerterDataQueue {
private final LinkedBlockingQueue<CollectRep.MetricsData> metricsDataQueue;
private final LinkedBlockingQueue<Alert> alertDataQueue;
// Creates the in-memory queue holder with unbounded buffers for collected
// metrics and for alert records. Both queues grow without limit; producers
// are never blocked or rejected.
public AlerterDataQueue() {
metricsDataQueue = new LinkedBlockingQueue<>();
alertDataQueue = new LinkedBlockingQueue<>();
}
/**
 * Enqueue one batch of collected metrics data for alarm calculation.
 * The queue is an unbounded LinkedBlockingQueue, so offer() always
 * succeeds and its return value can safely be ignored here.
 * @param metricsData collected metrics group data to queue
 */
public void addMetricsData(CollectRep.MetricsData metricsData) {
metricsDataQueue.offer(metricsData);
}
/**
 * Take the next metrics data item, waiting at most 2 seconds.
 * The timeout keeps consumer loops responsive to interruption/shutdown
 * instead of blocking forever on an empty queue.
 * @return the next metrics data, or null if none arrived within 2 seconds
 * @throws InterruptedException if the calling thread is interrupted while waiting
 */
public CollectRep.MetricsData pollMetricsData() throws InterruptedException {
return metricsDataQueue.poll(2, TimeUnit.SECONDS);
}
/**
 * Enqueue one alert record for downstream processing.
 * Unbounded queue: offer() cannot fail, so the result is ignored.
 * @param alert the alert record to queue
 */
public void addAlertData(Alert alert) {
alertDataQueue.offer(alert);
}

View File

@@ -4,16 +4,15 @@ import com.googlecode.aviator.AviatorEvaluator;
import com.googlecode.aviator.Expression;
import com.usthe.alert.AlerterWorkerPool;
import com.usthe.alert.AlerterDataQueue;
import com.usthe.alert.entrance.KafkaDataConsume;
import com.usthe.alert.pojo.entity.Alert;
import com.usthe.alert.pojo.entity.AlertDefine;
import com.usthe.alert.service.AlertDefineService;
import com.usthe.alert.util.AlertTemplateUtil;
import com.usthe.collector.dispatch.export.MetricsDataExporter;
import com.usthe.common.entity.message.CollectRep;
import com.usthe.common.util.CommonConstants;
import com.usthe.common.util.CommonUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.context.annotation.Configuration;
import java.util.HashMap;
@@ -27,20 +26,21 @@ import java.util.concurrent.ConcurrentHashMap;
* @date 2021/12/9 14:19
*/
@Configuration
@AutoConfigureAfter(value = {KafkaDataConsume.class})
@Slf4j
public class CalculateAlarm {
private AlerterWorkerPool workerPool;
private AlerterDataQueue dataQueue;
private MetricsDataExporter dataExporter;
private AlertDefineService alertDefineService;
private Map<String, Alert> triggeredAlertMap;
private Map<Long, CollectRep.Code> triggeredMonitorStateAlertMap;
public CalculateAlarm (AlerterWorkerPool workerPool, AlerterDataQueue dataQueue,
AlertDefineService alertDefineService) {
AlertDefineService alertDefineService, MetricsDataExporter dataExporter) {
this.workerPool = workerPool;
this.dataQueue = dataQueue;
this.dataExporter = dataExporter;
this.alertDefineService = alertDefineService;
this.triggeredAlertMap = new ConcurrentHashMap<>(128);
this.triggeredMonitorStateAlertMap = new ConcurrentHashMap<>(128);
@@ -51,7 +51,7 @@ public class CalculateAlarm {
Runnable runnable = () -> {
while (!Thread.currentThread().isInterrupted()) {
try {
CollectRep.MetricsData metricsData = dataQueue.pollMetricsData();
CollectRep.MetricsData metricsData = dataExporter.pollAlertMetricsData();
if (metricsData != null) {
calculate(metricsData);
}

View File

@@ -1,80 +0,0 @@
package com.usthe.alert.entrance;
import com.usthe.alert.AlerterProperties;
import com.usthe.alert.AlerterWorkerPool;
import com.usthe.alert.AlerterDataQueue;
import com.usthe.common.entity.message.CollectRep;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Configuration;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
/**
* 从Kafka消费指标组采集数据处理
* @author tom
* @date 2021/11/24 18:03
*/
@Configuration
@AutoConfigureAfter(value = {AlerterProperties.class})
@ConditionalOnProperty(prefix = "alerter.entrance.kafka",
name = "enabled", havingValue = "true", matchIfMissing = true)
@Slf4j
public class KafkaDataConsume implements DisposableBean {
private KafkaConsumer<Long, CollectRep.MetricsData> consumer;
private AlerterWorkerPool workerPool;
private AlerterDataQueue dataQueue;
public KafkaDataConsume(AlerterProperties properties, AlerterWorkerPool workerPool,
AlerterDataQueue dataQueue) {
this.workerPool = workerPool;
this.dataQueue = dataQueue;
initConsumer(properties);
startConsumeData();
}
private void startConsumeData() {
Runnable runnable = () -> {
Thread.currentThread().setName("warehouse-kafka-data-consumer");
while (!Thread.currentThread().isInterrupted()) {
ConsumerRecords<Long, CollectRep.MetricsData> records = consumer.poll(Duration.ofMillis(100));
records.forEach(record -> {
dataQueue.addMetricsData(record.value());
});
}
};
workerPool.executeJob(runnable);
}
private void initConsumer(AlerterProperties properties) {
if (properties == null || properties.getEntrance() == null || properties.getEntrance().getKafka() == null) {
log.error("init error, please config Warehouse kafka props in application.yml");
throw new IllegalArgumentException("please config Warehouse kafka props");
}
AlerterProperties.EntranceProperties.KafkaProperties kafkaProp = properties.getEntrance().getKafka();
Properties consumerProp = new Properties();
consumerProp.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProp.getServers());
consumerProp.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaProp.getGroupId());
consumerProp.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
consumerProp.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaMetricsDataDeserializer.class);
consumerProp.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
consumerProp.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
consumer = new KafkaConsumer<>(consumerProp);
consumer.subscribe(Collections.singleton(kafkaProp.getTopic()));
}
@Override
public void destroy() throws Exception {
if (consumer != null) {
consumer.close();
}
}
}

View File

@@ -1,24 +0,0 @@
package com.usthe.alert.entrance;
import com.usthe.common.entity.message.CollectRep;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.Deserializer;
/**
 * Kafka value deserializer that decodes raw record bytes into
 * CollectRep.MetricsData protobuf messages.
 * @author tom
 * @date 2021/11/24 17:29
 */
@Slf4j
public class KafkaMetricsDataDeserializer implements Deserializer<CollectRep.MetricsData> {

    /**
     * Parse the protobuf payload of a record value.
     * @param topic   topic the record came from (unused)
     * @param payload raw record value bytes
     * @return the decoded metrics data, or null when the payload cannot be parsed
     */
    @Override
    public CollectRep.MetricsData deserialize(String topic, byte[] payload) {
        CollectRep.MetricsData decoded = null;
        try {
            decoded = CollectRep.MetricsData.parseFrom(payload);
        } catch (Exception e) {
            // Malformed payloads are logged and surfaced as null rather than
            // failing the whole consumer poll loop.
            log.error(e.getMessage(), e);
        }
        return decoded;
    }
}

View File

@@ -6,7 +6,6 @@ com.usthe.alert.AlerterWorkerPool,\
com.usthe.alert.AlerterProperties,\
com.usthe.alert.AlerterDataQueue,\
com.usthe.alert.AlerterConfiguration,\
com.usthe.alert.entrance.KafkaDataConsume,\
com.usthe.alert.calculate.CalculateAlarm,\
com.usthe.alert.controller.AlertsController,\
com.usthe.alert.controller.AlertDefinesController