[monitor] Optimize: remove the Kafka and etcd dependencies

Architecture.jpg — binary file not shown (before: 250 KiB)

README.md (14 lines changed)
@@ -4,16 +4,12 @@
 ### 模块
 - **[manager](manager)** 提供监控管理,系统管理基础服务
-> 开发中,提供对监控的管理,监控应用配置的管理,系统用户租户后台管理等。
+> 提供对监控的管理,监控应用配置的管理,系统用户租户后台管理等。
 - **[collector](collector)** 提供监控数据采集服务
-> 开发中,使用通用协议远程采集获取对端指标数据。
+> 使用通用协议远程采集获取对端指标数据。
 - **[scheduler](scheduler)** 提供监控任务调度服务
-> 开发完成,采集任务管理,一次性任务和周期性任务的调度分发。
+> 采集任务管理,一次性任务和周期性任务的调度分发。
 - **[warehouse](warehouse)** 提供监控数据仓储服务
-> 开发中,采集指标结果数据管理,数据落盘,查询,计算统计。
+> 采集指标结果数据管理,数据落盘,查询,计算统计。
 - **[alerter](alerter)** 提供告警服务
-> 开发中,告警计算触发,监控状态联动,告警配置,告警通知。
+> 告警计算触发,监控状态联动,告警配置,告警通知。
 
-### 结构
-
-
-
@@ -5,7 +5,7 @@
     <parent>
         <artifactId>monitor</artifactId>
         <groupId>com.usthe.tancloud</groupId>
-        <version>1.0-SNAPSHOT</version>
+        <version>1.0</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>

@@ -20,7 +20,14 @@
         <dependency>
             <groupId>com.usthe.tancloud</groupId>
             <artifactId>common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>1.0</version>
+        </dependency>
+        <!-- collector -->
+        <dependency>
+            <groupId>com.usthe.tancloud</groupId>
+            <artifactId>collector</artifactId>
+            <version>1.0</version>
+            <scope>provided</scope>
         </dependency>
         <!-- spring -->
         <dependency>
@@ -1,7 +1,6 @@
 package com.usthe.alert;
 
 import com.usthe.alert.pojo.entity.Alert;
-import com.usthe.common.entity.message.CollectRep;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Component;
 
@@ -17,22 +16,12 @@ import java.util.concurrent.TimeUnit;
 @Slf4j
 public class AlerterDataQueue {
 
-    private final LinkedBlockingQueue<CollectRep.MetricsData> metricsDataQueue;
     private final LinkedBlockingQueue<Alert> alertDataQueue;
 
     public AlerterDataQueue() {
-        metricsDataQueue = new LinkedBlockingQueue<>();
         alertDataQueue = new LinkedBlockingQueue<>();
     }
 
-    public void addMetricsData(CollectRep.MetricsData metricsData) {
-        metricsDataQueue.offer(metricsData);
-    }
-
-    public CollectRep.MetricsData pollMetricsData() throws InterruptedException {
-        return metricsDataQueue.poll(2, TimeUnit.SECONDS);
-    }
-
     public void addAlertData(Alert alert) {
         alertDataQueue.offer(alert);
     }
@@ -4,16 +4,15 @@ import com.googlecode.aviator.AviatorEvaluator;
 import com.googlecode.aviator.Expression;
 import com.usthe.alert.AlerterWorkerPool;
 import com.usthe.alert.AlerterDataQueue;
-import com.usthe.alert.entrance.KafkaDataConsume;
 import com.usthe.alert.pojo.entity.Alert;
 import com.usthe.alert.pojo.entity.AlertDefine;
 import com.usthe.alert.service.AlertDefineService;
 import com.usthe.alert.util.AlertTemplateUtil;
+import com.usthe.collector.dispatch.export.MetricsDataExporter;
 import com.usthe.common.entity.message.CollectRep;
 import com.usthe.common.util.CommonConstants;
 import com.usthe.common.util.CommonUtil;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.boot.autoconfigure.AutoConfigureAfter;
 import org.springframework.context.annotation.Configuration;
 
 import java.util.HashMap;
@@ -27,20 +26,21 @@ import java.util.concurrent.ConcurrentHashMap;
  * @date 2021/12/9 14:19
  */
 @Configuration
-@AutoConfigureAfter(value = {KafkaDataConsume.class})
 @Slf4j
 public class CalculateAlarm {
 
     private AlerterWorkerPool workerPool;
     private AlerterDataQueue dataQueue;
+    private MetricsDataExporter dataExporter;
     private AlertDefineService alertDefineService;
     private Map<String, Alert> triggeredAlertMap;
    private Map<Long, CollectRep.Code> triggeredMonitorStateAlertMap;
 
     public CalculateAlarm (AlerterWorkerPool workerPool, AlerterDataQueue dataQueue,
-                           AlertDefineService alertDefineService) {
+                           AlertDefineService alertDefineService, MetricsDataExporter dataExporter) {
         this.workerPool = workerPool;
         this.dataQueue = dataQueue;
+        this.dataExporter = dataExporter;
         this.alertDefineService = alertDefineService;
         this.triggeredAlertMap = new ConcurrentHashMap<>(128);
         this.triggeredMonitorStateAlertMap = new ConcurrentHashMap<>(128);
@@ -51,7 +51,7 @@ public class CalculateAlarm {
         Runnable runnable = () -> {
             while (!Thread.currentThread().isInterrupted()) {
                 try {
-                    CollectRep.MetricsData metricsData = dataQueue.pollMetricsData();
+                    CollectRep.MetricsData metricsData = dataExporter.pollAlertMetricsData();
                     if (metricsData != null) {
                         calculate(metricsData);
                    }
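With the Kafka entrance gone, CalculateAlarm pulls metrics straight from an in-process blocking queue exposed by MetricsDataExporter. A minimal, self-contained sketch of that hand-off pattern using only JDK types (the class and field names below are illustrative, not the project's own):

```java
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

// In-process producer/consumer hand-off: the pattern this commit adopts in
// place of publishing metrics data through Kafka.
public class InProcessQueueSketch {

    private final LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>();

    // producer side (analogous to the collector finishing a metrics group)
    public void send(String metricsData) {
        queue.offer(metricsData);
    }

    // consumer side (analogous to the alert calculator), polling with a 2s timeout
    public String poll() throws InterruptedException {
        return queue.poll(2, TimeUnit.SECONDS);
    }

    public static void main(String[] args) throws InterruptedException {
        InProcessQueueSketch sketch = new InProcessQueueSketch();
        sketch.send("app: linux, metrics: cpu");
        System.out.println(sketch.poll()); // prints the queued item, or null after 2 seconds
    }
}
```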
Deleted (80 lines): com.usthe.alert.entrance.KafkaDataConsume
The alerter's Kafka entrance: a @Configuration / @ConditionalOnProperty("alerter.entrance.kafka", name = "enabled") DisposableBean that built a KafkaConsumer<Long, CollectRep.MetricsData> from AlerterProperties (bootstrap servers, group id, LongDeserializer / KafkaMetricsDataDeserializer, auto-commit every 1000 ms), subscribed to the configured topic, and ran a worker-pool loop pushing each record into AlerterDataQueue; the consumer was closed in destroy().
Deleted (24 lines): com.usthe.alert.entrance.KafkaMetricsDataDeserializer
Kafka Deserializer<CollectRep.MetricsData> that parsed record bytes with CollectRep.MetricsData.parseFrom, logging and returning null on failure.
@@ -6,7 +6,6 @@ com.usthe.alert.AlerterWorkerPool,\
 com.usthe.alert.AlerterProperties,\
 com.usthe.alert.AlerterDataQueue,\
 com.usthe.alert.AlerterConfiguration,\
-com.usthe.alert.entrance.KafkaDataConsume,\
 com.usthe.alert.calculate.CalculateAlarm,\
 com.usthe.alert.controller.AlertsController,\
 com.usthe.alert.controller.AlertDefinesController
Deleted (44 lines): collector assembly descriptor (assembly.xml)
Maven assembly that produced tar.gz and zip archives: copied the filtered scripts from ../../assembly/collector/bin into bin/, packaged application.yml and logback-spring.xml into config/, and included the *executable.jar from target/.
Deleted (109 lines): collector startup script
Bash launcher that resolved the service and jar name from Maven properties, read the port from config/application.yml, refused to start when the process or port was already in use, created the logs directory, assembled JVM options (headless, optional debug/JMX, 32/64-bit heap presets), pointed Spring at the external config directory, launched the executable jar with nohup, and polled until the port came up.
Deleted (18 lines): collector shutdown script
Located the PID of the running ${project.build.finalName}.jar and killed it with kill -9, reporting whether the application was already stopped.
@@ -31,18 +31,6 @@
 * Ping
 * 服务端口
 
 #### HELP
 
-1. ARK插件类隔离未生效
-> 注意需构建在jdk1.8环境中运行
-> 插件是否配置导入并配置正确
-> 本地DEBUG时需单独IDEA打开运行collector工程,不能将plugin和collector在同一工程打开运行
-
-2. metaspace元空间内存占用多或溢出
-> 建议调整JVM参数 ```-Dsun.reflect.inflationThreshold=100000```
-> 由于使用太多反射,超过参数`inflationThreshold`默认值15阈值导致触发JVM反射优化(加快反射速度),
-> 反射获取类信息由使用*JNI存取器**膨胀(Inflation)*
-> 为*反射每个方法生成一个类加载器DelegatingClassLoader和Java类MethodAccessor*.
-> 动态加载的字节码导致PermGen持续增长.
-
-
Deleted (19 lines): plugins/pom.xml
Aggregator POM for the old collector plugins module (packaging pom, parent collector 1.0-SNAPSHOT) containing the single module sample-plugin.
Deleted (48 lines): sample-plugin/pom.xml
POM for the SOFA-Ark sample plugin: sofa-ark-plugin-maven-plugin 1.1.6 with priority 2000, exporting the class com.com.usthe.plugin.sample.ExportDemo.
Deleted (12 lines): com.usthe.collector.plugin.SameClass (plugin-side copy)
Demo class whose static hello() returned "hello plugin", used to exercise Ark plugin class isolation.
Deleted (14 lines): com.usthe.plugin.sample.ExportDemo
Exported demo class whose hello() delegated to the plugin-side SameClass.
@@ -5,14 +5,85 @@
     <parent>
         <artifactId>monitor</artifactId>
         <groupId>com.usthe.tancloud</groupId>
-        <version>1.0-SNAPSHOT</version>
+        <version>1.0</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>

     <artifactId>collector</artifactId>
-    <packaging>pom</packaging>
-    <modules>
-        <module>server</module>
-        <module>plugins</module>
-    </modules>
+    <dependencies>
+        <!-- spring -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-configuration-processor</artifactId>
+            <optional>true</optional>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-autoconfigure</artifactId>
+        </dependency>
+        <!-- common -->
+        <dependency>
+            <groupId>com.usthe.tancloud</groupId>
+            <artifactId>common</artifactId>
+            <version>1.0-SNAPSHOT</version>
+        </dependency>
+        <!-- etcd -->
+        <dependency>
+            <groupId>io.etcd</groupId>
+            <artifactId>jetcd-core</artifactId>
+            <version>0.5.11</version>
+        </dependency>
+        <!-- kafka -->
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>3.0.0</version>
+        </dependency>
+        <!-- http -->
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>4.5.13</version>
+        </dependency>
+        <!--network-->
+        <dependency>
+            <groupId>commons-net</groupId>
+            <artifactId>commons-net</artifactId>
+            <version>3.8.0</version>
+        </dependency>
+        <!--json path parser-->
+        <dependency>
+            <groupId>com.jayway.jsonpath</groupId>
+            <artifactId>json-path</artifactId>
+            <version>2.6.0</version>
+        </dependency>
+        <!-- lru hashmap -->
+        <dependency>
+            <groupId>com.googlecode.concurrentlinkedhashmap</groupId>
+            <artifactId>concurrentlinkedhashmap-lru</artifactId>
+            <version>1.4.2</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>31.0.1-jre</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>2.8.8</version>
+        </dependency>
+        <dependency>
+            <groupId>com.googlecode.aviator</groupId>
+            <artifactId>aviator</artifactId>
+            <version>5.2.7</version>
+        </dependency>
+    </dependencies>
 
 </project>
Deleted (146 lines): collector server module POM
POM of the old collector server module: spring-boot-starter-webflux, spring-boot-configuration-processor, SOFA-Ark (sofa-ark-springboot-starter and sofa-ark-maven-plugin 1.1.6 repackaging an executable ark jar, finalName hertz-beat-collector), the common module, jetcd-core 0.5.11, kafka-clients 3.0.0, httpclient 4.5.13, commons-net 3.8.0, json-path 2.6.0, concurrentlinkedhashmap-lru 1.4.2, guava 31.0.1-jre, gson 2.8.8, aviator 5.2.7, the sample-plugin, plus a maven-assembly-plugin execution bound to package using ../assembly/collector/assembly.xml. Its dependency list now lives directly in the collector POM shown above.
Deleted (19 lines): com.usthe.collector.Collector
The standalone @SpringBootApplication entry point (excluding KafkaAutoConfiguration); the collector no longer starts as its own process.
Deleted (23 lines): com.usthe.collector.dispatch.DispatchConfiguration
@Configuration that set AviatorEvaluator to use a 1024-entry LRU expression cache.
Deleted (57 lines): com.usthe.collector.dispatch.entrance.http.CollectJobController
@RestController exposing POST /job/sync; it registered a CollectResponseEventListener with TimerDispatch inside a Mono and returned the collected CollectRep.MetricsData as JSON via ProtoJsonUtil. Superseded by the in-process CollectJobService added below.
Deleted (62 lines): com.usthe.collector.dispatch.export.KafkaDataExporter
@Configuration / @ConditionalOnProperty("collector.dispatch.export.kafka", name = "enabled") DisposableBean that built a KafkaProducer<Long, CollectRep.MetricsData> (LongSerializer / KafkaMetricsDataSerializer) from DispatchProperties and published each metrics group to the configured topic; replaced by the queue-based MetricsDataExporter added below.
Deleted (18 lines): com.usthe.collector.dispatch.export.KafkaMetricsDataSerializer
Kafka Serializer<CollectRep.MetricsData> returning metricsData.toByteArray().
Deleted (12 lines): com.usthe.collector.plugin.SameClass (collector-side copy)
Demo class whose static hello() returned "hello collector".
Deleted (20 lines): com.usthe.collector.plugin.TestPlugin
CommandLineRunner that logged SameClass.hello() and new ExportDemo().hello() at startup to verify Ark plugin class isolation.
Deleted (19 lines): collector server application.yml
server.port 1157, dev profile, Jackson NON_EMPTY inclusion, collector.dispatch.entrance.etcd.endpoints http://139.198.109.64:2379, and collector.dispatch.export.kafka enabled against 139.198.109.64:9092 with topic async-collect-data.
Deleted (6 lines): collector banner.txt
The ASCII-art COLLECTOR startup banner showing the active profile, application name, port and PID.
Deleted (79 lines): collector logback-spring.xml
Logback configuration with a console appender plus size-and-time rolling file appenders (daily files capped at 200 MB, a separate ERROR-filtered file), per-library log levels, and prod/dev springProfile root configurations.
@@ -1,6 +1,6 @@
 package com.usthe.collector.dispatch;
 
-import com.usthe.collector.dispatch.export.KafkaDataExporter;
+import com.usthe.collector.dispatch.export.MetricsDataExporter;
 import com.usthe.collector.dispatch.timer.Timeout;
 import com.usthe.collector.dispatch.timer.TimerDispatch;
 import com.usthe.collector.dispatch.timer.WheelTimerTask;
@@ -44,14 +44,14 @@ public class CommonDispatcher implements MetricsTaskDispatch, CollectDataDispatc
     /**
      * kafka采集数据导出器
      */
-    private KafkaDataExporter kafkaDataExporter;
+    private MetricsDataExporter kafkaDataExporter;
     /**
      * 指标组任务与开始时间映射map
      */
     private Map<String, MetricsTime> metricsTimeoutMonitorMap;
 
     public CommonDispatcher(MetricsCollectorQueue jobRequestQueue, TimerDispatch timerDispatch,
-                            KafkaDataExporter kafkaDataExporter, WorkerPool workerPool) {
+                            MetricsDataExporter kafkaDataExporter, WorkerPool workerPool) {
         this.kafkaDataExporter = kafkaDataExporter;
         this.jobRequestQueue = jobRequestQueue;
         this.timerDispatch = timerDispatch;
@@ -0,0 +1,83 @@
+package com.usthe.collector.dispatch.entrance.internal;
+
+import com.usthe.collector.dispatch.timer.TimerDispatch;
+import com.usthe.common.entity.job.Job;
+import com.usthe.common.entity.message.CollectRep;
+import com.usthe.common.util.SnowFlakeIdGenerator;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * 采集job管理提供api接口
+ * @author tomsun28
+ * @date 2021/11/6 13:58
+ */
+@Service
+@Slf4j
+public class CollectJobService {
+
+    @Autowired
+    private TimerDispatch timerDispatch;
+
+    /**
+     * 执行一次性采集任务,获取采集数据响应
+     * @param job 采集任务详情
+     * @return 采集结果
+     */
+    public List<CollectRep.MetricsData> collectSyncJobData(Job job) {
+        final List<CollectRep.MetricsData> metricsData = new LinkedList<>();
+        final CountDownLatch countDownLatch = new CountDownLatch(1);
+        CollectResponseEventListener listener = new CollectResponseEventListener() {
+            @Override
+            public void response(List<CollectRep.MetricsData> responseMetrics) {
+                if (responseMetrics != null) {
+                    metricsData.addAll(responseMetrics);
+                }
+                countDownLatch.countDown();
+            }
+        };
+        timerDispatch.addJob(job, listener);
+        try {
+            countDownLatch.await(100, TimeUnit.SECONDS);
+        } catch (Exception e) {
+            log.info("同步任务运行100秒无响应,返回");
+        }
+        return metricsData;
+    }
+
+    /**
+     * 下发周期性异步采集任务
+     * @param job 采集任务详情
+     * @return long 任务ID
+     */
+    public long addAsyncCollectJob(Job job) {
+        long jobId = SnowFlakeIdGenerator.generateId();
+        job.setId(jobId);
+        timerDispatch.addJob(job, null);
+        return jobId;
+    }
+
+    /**
+     * 更新已经下发的周期性异步采集任务
+     * @param modifyJob 采集任务详情
+     */
+    public void updateAsyncCollectJob(Job modifyJob) {
+        timerDispatch.deleteJob(modifyJob.getId(), true);
+        timerDispatch.addJob(modifyJob, null);
+    }
+
+    /**
+     * 取消周期性异步采集任务
+     * @param jobId 任务ID
+     */
+    public void cancelAsyncCollectJob(Long jobId) {
+        timerDispatch.deleteJob(jobId, true);
+    }
+
+}
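Because CollectJobService is a plain @Service in the same JVM, callers such as the manager can invoke collection directly instead of routing jobs through the scheduler/etcd. A sketch of such a caller; only collectSyncJobData and addAsyncCollectJob come from the commit, the surrounding class and its wiring are assumptions:

```java
import com.usthe.collector.dispatch.entrance.internal.CollectJobService;
import com.usthe.common.entity.job.Job;
import com.usthe.common.entity.message.CollectRep;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.List;

// Illustrative caller, not part of the commit.
@Service
public class MonitorProbeExample {

    @Autowired
    private CollectJobService collectJobService;

    /** One-shot probe: blocks until the collector responds (or the 100s timeout elapses). */
    public boolean detect(Job detectJob) {
        List<CollectRep.MetricsData> result = collectJobService.collectSyncJobData(detectJob);
        return !result.isEmpty();
    }

    /** Periodic collection: returns the generated job id used later for update/cancel. */
    public long schedule(Job periodicJob) {
        return collectJobService.addAsyncCollectJob(periodicJob);
    }
}
```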
@@ -1,4 +1,4 @@
-package com.usthe.collector.dispatch.entrance.http;
+package com.usthe.collector.dispatch.entrance.internal;
 
 import com.usthe.common.entity.message.CollectRep;
 
@@ -16,5 +16,5 @@ public interface CollectResponseEventListener extends EventListener {
      * 采集任务完成结果通知
      * @param responseMetrics 响应数据
      */
-    public default void response(List<CollectRep.MetricsData> responseMetrics) {}
+    default void response(List<CollectRep.MetricsData> responseMetrics) {}
 }
@@ -0,0 +1,56 @@
+package com.usthe.collector.dispatch.export;
+
+import com.usthe.common.entity.message.CollectRep;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.DisposableBean;
+import org.springframework.stereotype.Component;
+
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * 采集数据消息发送
+ * @author tomsun28
+ * @date 2021/11/3 15:22
+ */
+@Component
+@Slf4j
+public class MetricsDataExporter implements DisposableBean {
+
+    private final LinkedBlockingQueue<CollectRep.MetricsData> metricsDataToAlertQueue;
+    private final LinkedBlockingQueue<CollectRep.MetricsData> metricsDataToWarehouseInfluxQueue;
+    private final LinkedBlockingQueue<CollectRep.MetricsData> metricsDataToWarehouseRedisQueue;
+
+    public MetricsDataExporter() {
+        metricsDataToAlertQueue = new LinkedBlockingQueue<>();
+        metricsDataToWarehouseInfluxQueue = new LinkedBlockingQueue<>();
+        metricsDataToWarehouseRedisQueue = new LinkedBlockingQueue<>();
+    }
+
+    public CollectRep.MetricsData pollAlertMetricsData() throws InterruptedException {
+        return metricsDataToAlertQueue.poll(2, TimeUnit.SECONDS);
+    }
+
+    public CollectRep.MetricsData pollWarehouseInfluxMetricsData() throws InterruptedException {
+        return metricsDataToWarehouseInfluxQueue.poll(2, TimeUnit.SECONDS);
+    }
+
+    public CollectRep.MetricsData pollWarehouseRedisMetricsData() throws InterruptedException {
+        return metricsDataToWarehouseRedisQueue.poll(2, TimeUnit.SECONDS);
+    }
+
+    /**
+     * 发送消息
+     * @param metricsData 指标组采集数据
+     */
+    public void send(CollectRep.MetricsData metricsData) {
+        metricsDataToAlertQueue.offer(metricsData);
+        metricsDataToWarehouseInfluxQueue.offer(metricsData);
+        metricsDataToWarehouseRedisQueue.offer(metricsData);
+    }
+
+    @Override
+    public void destroy() throws Exception {
+        metricsDataToAlertQueue.clear();
+    }
+}
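MetricsDataExporter fans every collected metrics group out to three in-memory queues (alerter, warehouse/InfluxDB, warehouse/Redis). A sketch of a warehouse-side drain loop mirroring the alerter loop in CalculateAlarm above; only pollWarehouseRedisMetricsData comes from the commit, the worker pool and storage call are placeholders:

```java
// Hypothetical warehouse-side consumer; redisStorage and workerPool are assumed names.
Runnable warehouseWorker = () -> {
    while (!Thread.currentThread().isInterrupted()) {
        try {
            CollectRep.MetricsData metricsData = dataExporter.pollWarehouseRedisMetricsData();
            if (metricsData != null) {
                redisStorage.save(metricsData); // assumed storage hook
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
};
workerPool.executeJob(warehouseWorker); // same executeJob style as the removed Kafka consumer used
```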
@@ -1,7 +1,7 @@
 package com.usthe.collector.dispatch.timer;
 
 
-import com.usthe.collector.dispatch.entrance.http.CollectResponseEventListener;
+import com.usthe.collector.dispatch.entrance.internal.CollectResponseEventListener;
 import com.usthe.common.entity.job.Job;
 import com.usthe.common.entity.message.CollectRep;
 
@@ -1,6 +1,6 @@
 package com.usthe.collector.dispatch.timer;
 
-import com.usthe.collector.dispatch.entrance.http.CollectResponseEventListener;
+import com.usthe.collector.dispatch.entrance.internal.CollectResponseEventListener;
 import com.usthe.common.entity.job.Job;
 import com.usthe.common.entity.message.CollectRep;
 import org.springframework.stereotype.Component;
collector/src/main/resources/META-INF/spring.factories (new file, 9 lines)
@@ -0,0 +1,9 @@
+org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
+com.usthe.collector.dispatch.timer.TimerDispatcher,\
+com.usthe.collector.dispatch.CommonDispatcher,\
+com.usthe.collector.dispatch.DispatchProperties,\
+com.usthe.collector.dispatch.MetricsCollectorQueue,\
+com.usthe.collector.dispatch.WorkerPool,\
+com.usthe.collector.dispatch.entrance.internal.CollectJobService,\
+com.usthe.collector.dispatch.export.MetricsDataExporter,\
+com.usthe.collector.util.SpringContextHolder
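Registering the collector beans in META-INF/spring.factories means any module that puts the collector jar on its classpath picks them up through Spring Boot auto-configuration, without component-scanning the collector packages. A minimal sketch; the host application class is illustrative, not part of the commit:

```java
import com.usthe.collector.dispatch.entrance.internal.CollectJobService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Illustrative host application (e.g. the manager). @SpringBootApplication implies
// @EnableAutoConfiguration, which loads the classes listed in the collector's spring.factories.
@SpringBootApplication
public class HostApplicationExample {

    // bean contributed by the collector jar via auto-configuration
    @Autowired
    private CollectJobService collectJobService;

    public static void main(String[] args) {
        SpringApplication.run(HostApplicationExample.class, args);
    }
}
```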
@@ -5,7 +5,7 @@
     <parent>
         <artifactId>monitor</artifactId>
         <groupId>com.usthe.tancloud</groupId>
-        <version>1.0-SNAPSHOT</version>
+        <version>1.0</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -5,7 +5,7 @@
|
|||||||
<parent>
|
<parent>
|
||||||
<artifactId>monitor</artifactId>
|
<artifactId>monitor</artifactId>
|
||||||
<groupId>com.usthe.tancloud</groupId>
|
<groupId>com.usthe.tancloud</groupId>
|
||||||
<version>1.0-SNAPSHOT</version>
|
<version>1.0</version>
|
||||||
</parent>
|
</parent>
|
||||||
<modelVersion>4.0.0</modelVersion>
|
<modelVersion>4.0.0</modelVersion>
|
||||||
<artifactId>manager</artifactId>
|
<artifactId>manager</artifactId>
|
||||||
@@ -22,25 +22,25 @@
         <dependency>
             <groupId>com.usthe.tancloud</groupId>
             <artifactId>common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>1.0</version>
         </dependency>
-        <!-- scheduler -->
-        <dependency>
-            <groupId>com.usthe.tancloud</groupId>
-            <artifactId>scheduler</artifactId>
-            <version>1.0-SNAPSHOT</version>
-        </dependency>
         <!-- data warehouse -->
         <dependency>
             <groupId>com.usthe.tancloud</groupId>
             <artifactId>warehouse</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>1.0</version>
         </dependency>
         <!-- alerter -->
         <dependency>
             <groupId>com.usthe.tancloud</groupId>
             <artifactId>alerter</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>1.0</version>
+        </dependency>
+        <!-- collector -->
+        <dependency>
+            <groupId>com.usthe.tancloud</groupId>
+            <artifactId>collector</artifactId>
+            <version>1.0</version>
         </dependency>
         <!-- spring -->
         <dependency>
@@ -123,6 +123,7 @@
                     <include>sureness.yml</include>
                     <include>banner.txt</include>
                     <include>db/**</include>
+                    <include>define/**</include>
                 </includes>
             </resource>
         </resources>
@@ -167,7 +168,7 @@
                         </goals>
                         <configuration>
                             <descriptors>
-                                <descriptor>../assembly/server/assembly.xml</descriptor>
+                                <descriptor>../script/assembly/server/assembly.xml</descriptor>
                             </descriptors>
                         </configuration>
                     </execution>
@@ -1,5 +1,6 @@
 package com.usthe.manager.service.impl;
 
+import com.usthe.collector.dispatch.entrance.internal.CollectJobService;
 import com.usthe.common.entity.job.Configmap;
 import com.usthe.common.entity.job.Job;
 import com.usthe.common.entity.job.Metrics;
@@ -20,7 +21,6 @@ import com.usthe.manager.service.AppService;
 import com.usthe.manager.service.MonitorService;
 import com.usthe.manager.support.exception.MonitorDatabaseException;
 import com.usthe.manager.support.exception.MonitorDetectException;
-import com.usthe.scheduler.JobScheduling;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.data.domain.Page;
@@ -52,7 +52,7 @@ public class MonitorServiceImpl implements MonitorService {
     private AppService appService;
 
     @Autowired
-    private JobScheduling jobScheduling;
+    private CollectJobService collectJobService;
 
     @Autowired
     private MonitorDao monitorDao;
@@ -74,7 +74,7 @@ public class MonitorServiceImpl implements MonitorService {
         List<Configmap> configmaps = params.stream().map(param ->
                 new Configmap(param.getField(), param.getValue(), param.getType())).collect(Collectors.toList());
         appDefine.setConfigmap(configmaps);
-        List<CollectRep.MetricsData> collectRep = jobScheduling.addSyncCollectJob(appDefine);
+        List<CollectRep.MetricsData> collectRep = collectJobService.collectSyncJobData(appDefine);
         // Check the detect result, throw a detect exception on failure
         if (collectRep == null || collectRep.isEmpty()) {
             throw new MonitorDetectException("No collector response");
@@ -101,7 +101,7 @@ public class MonitorServiceImpl implements MonitorService {
         }).collect(Collectors.toList());
         appDefine.setConfigmap(configmaps);
         // Dispatch the collect job and obtain the jobId
-        long jobId = jobScheduling.addAsyncCollectJob(appDefine);
+        long jobId = collectJobService.addAsyncCollectJob(appDefine);
         // Persist to the database after the job is dispatched successfully
         try {
             monitor.setId(monitorId);
@@ -112,7 +112,7 @@ public class MonitorServiceImpl implements MonitorService {
         } catch (Exception e) {
             log.error(e.getMessage(), e);
             // Cancel the previously dispatched job when persisting fails
-            jobScheduling.cancelAsyncCollectJob(jobId);
+            collectJobService.cancelAsyncCollectJob(jobId);
             throw new MonitorDatabaseException(e.getMessage());
         }
     }
@@ -225,7 +225,7 @@ public class MonitorServiceImpl implements MonitorService {
                 new Configmap(param.getField(), param.getValue(), param.getType())).collect(Collectors.toList());
         appDefine.setConfigmap(configmaps);
         // Update the collect job
-        jobScheduling.updateAsyncCollectJob(appDefine);
+        collectJobService.updateAsyncCollectJob(appDefine);
         // Persist to the database after the updated job is dispatched
         try {
             monitor.setJobId(preMonitor.getJobId());
@@ -246,7 +246,7 @@ public class MonitorServiceImpl implements MonitorService {
             Monitor monitor = monitorOptional.get();
             monitorDao.deleteById(id);
             paramDao.deleteParamsByMonitorId(id);
-            jobScheduling.cancelAsyncCollectJob(monitor.getJobId());
+            collectJobService.cancelAsyncCollectJob(monitor.getJobId());
         }
     }
 
@@ -258,7 +258,7 @@ public class MonitorServiceImpl implements MonitorService {
             monitorDao.deleteAll(monitors);
             paramDao.deleteParamsByMonitorIdIn(ids);
             for (Monitor monitor : monitors) {
-                jobScheduling.cancelAsyncCollectJob(monitor.getJobId());
+                collectJobService.cancelAsyncCollectJob(monitor.getJobId());
             }
         }
     }
@@ -299,7 +299,7 @@ public class MonitorServiceImpl implements MonitorService {
         if (!managedMonitors.isEmpty()) {
             monitorDao.saveAll(managedMonitors);
             for (Monitor monitor : managedMonitors) {
-                jobScheduling.cancelAsyncCollectJob(monitor.getJobId());
+                collectJobService.cancelAsyncCollectJob(monitor.getJobId());
             }
         }
     }
@@ -326,7 +326,7 @@ public class MonitorServiceImpl implements MonitorService {
                     new Configmap(param.getField(), param.getValue(), param.getType())).collect(Collectors.toList());
             appDefine.setConfigmap(configmaps);
             // Dispatch the collect job
-            jobScheduling.addAsyncCollectJob(appDefine);
+            collectJobService.addAsyncCollectJob(appDefine);
         }
     }
 }
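Taken together, the MonitorServiceImpl hunks above replace every call that used to go through the scheduler's JobScheduling with a direct call into the collector. A sketch of the CollectJobService surface as inferred purely from these call sites; the real class in com.usthe.collector.dispatch.entrance.internal may declare more methods or different return types, and the interface name below is hypothetical:

    import java.util.List;

    import com.usthe.common.entity.job.Job;
    import com.usthe.common.entity.message.CollectRep;

    /** Inferred surface only; method names mirror the calls in the diff, signatures are assumptions. */
    public interface CollectJobServiceSketch {

        /** Run a one-off collect job synchronously and return its metrics, used by the detect flow. */
        List<CollectRep.MetricsData> collectSyncJobData(Job job);

        /** Register a periodic collect job and return the jobId stored on the Monitor. */
        long addAsyncCollectJob(Job job);

        /** Re-dispatch an existing periodic job with an updated definition. */
        void updateAsyncCollectJob(Job job);

        /** Cancel a periodic collect job by its jobId. */
        void cancelAsyncCollectJob(long jobId);
    }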
@@ -4,7 +4,6 @@ package com.usthe.manager.support;
 import com.usthe.common.entity.dto.Message;
 import com.usthe.manager.support.exception.MonitorDatabaseException;
 import com.usthe.manager.support.exception.MonitorDetectException;
-import com.usthe.scheduler.ScheduleException;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.dao.DataAccessException;
 import org.springframework.http.HttpStatus;
@@ -133,23 +132,6 @@ public class GlobalExceptionHandler {
         return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(message);
     }
 
-    /**
-     * Handle exceptions from the dispatch scheduler
-     * @param exception scheduler exception
-     * @return response
-     */
-    @ExceptionHandler(ScheduleException.class)
-    @ResponseBody
-    ResponseEntity<Message<Void>> handleScheduleException(ScheduleException exception) {
-        String errorMessage = "scheduler warning";
-        if (exception != null) {
-            errorMessage = exception.getMessage();
-        }
-        log.warn("[scheduler warning]-{}", errorMessage);
-        Message<Void> message = Message.<Void>builder().msg(errorMessage).code(MONITOR_CONFLICT_CODE).build();
-        return ResponseEntity.status(HttpStatus.CONFLICT).body(message);
-    }
-
     /**
      * handler the exception thrown for datastore error
      * @param exception datastore exception
pom.xml
@@ -7,9 +7,8 @@
     <groupId>com.usthe.tancloud</groupId>
     <artifactId>monitor</artifactId>
     <packaging>pom</packaging>
-    <version>1.0-SNAPSHOT</version>
+    <version>1.0</version>
     <modules>
-        <module>scheduler</module>
         <module>manager</module>
         <module>alerter</module>
         <module>common</module>
script/assembly/package-build.sh (new file)
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+cd ../../web-app
+
+ng build --prod --base-href /console/
+
+cd ..
+
+mvn clean package
@@ -22,8 +22,11 @@ http://maven.apache.org/ASSEMBLY/2.0.0 ">
     <fileSets>
         <!-- All script files under bin are output to the packaged bin directory -->
         <fileSet>
-            <directory>../assembly/server/bin</directory>
+            <directory>../script/assembly/server/bin</directory>
+            <!-- Whether to apply property substitution, e.g. ${project.artifactId} -->
+            <filtered>true</filtered>
             <outputDirectory>bin</outputDirectory>
+            <fileMode>0755</fileMode>
         </fileSet>
 
         <!-- Config files under src/main/resources are packaged into the config directory -->
@@ -5,7 +5,7 @@
     <parent>
         <artifactId>monitor</artifactId>
         <groupId>com.usthe.tancloud</groupId>
-        <version>1.0-SNAPSHOT</version>
+        <version>1.0</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -16,7 +16,14 @@
         <dependency>
             <groupId>com.usthe.tancloud</groupId>
             <artifactId>common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>1.0</version>
+        </dependency>
+        <!-- collector -->
+        <dependency>
+            <groupId>com.usthe.tancloud</groupId>
+            <artifactId>collector</artifactId>
+            <version>1.0</version>
+            <scope>provided</scope>
         </dependency>
         <!-- spring -->
         <dependency>
@@ -1,81 +0,0 @@
-package com.usthe.warehouse.entrance;
-
-import com.usthe.common.entity.message.CollectRep;
-import com.usthe.warehouse.MetricsDataQueue;
-import com.usthe.warehouse.WarehouseProperties;
-import com.usthe.warehouse.WarehouseWorkerPool;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
-import org.springframework.beans.factory.DisposableBean;
-import org.springframework.boot.autoconfigure.AutoConfigureAfter;
-import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
-import org.springframework.context.annotation.Configuration;
-
-import java.time.Duration;
-import java.util.Collections;
-import java.util.Properties;
-
-/**
- * Consume metrics-group collect data from Kafka and process it
- * @author tom
- * @date 2021/11/24 18:03
- */
-@Configuration
-@AutoConfigureAfter(value = {WarehouseProperties.class})
-@ConditionalOnProperty(prefix = "warehouse.entrance.kafka",
-        name = "enabled", havingValue = "true", matchIfMissing = true)
-@Slf4j
-public class KafkaDataConsume implements DisposableBean {
-
-    private KafkaConsumer<Long, CollectRep.MetricsData> consumer;
-    private WarehouseWorkerPool workerPool;
-    private MetricsDataQueue dataQueue;
-    public KafkaDataConsume(WarehouseProperties properties, WarehouseWorkerPool workerPool,
-                            MetricsDataQueue dataQueue) {
-        this.workerPool = workerPool;
-        this.dataQueue = dataQueue;
-        initConsumer(properties);
-        startConsumeData();
-    }
-
-    private void startConsumeData() {
-        Runnable runnable = () -> {
-            Thread.currentThread().setName("warehouse-kafka-data-consumer");
-            while (!Thread.currentThread().isInterrupted()) {
-                ConsumerRecords<Long, CollectRep.MetricsData> records = consumer.poll(Duration.ofMillis(100));
-                records.forEach(record -> {
-                    dataQueue.addMetricsDataToInflux(record.value());
-                    dataQueue.addMetricsDataToRedis(record.value());
-                });
-            }
-        };
-        workerPool.executeJob(runnable);
-    }
-
-    private void initConsumer(WarehouseProperties properties) {
-        if (properties == null || properties.getEntrance() == null || properties.getEntrance().getKafka() == null) {
-            log.error("init error, please config Warehouse kafka props in application.yml");
-            throw new IllegalArgumentException("please config Warehouse kafka props");
-        }
-        WarehouseProperties.EntranceProperties.KafkaProperties kafkaProp = properties.getEntrance().getKafka();
-        Properties consumerProp = new Properties();
-        consumerProp.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProp.getServers());
-        consumerProp.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaProp.getGroupId());
-        consumerProp.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
-        consumerProp.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaMetricsDataDeserializer.class);
-        consumerProp.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
-        consumerProp.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
-        consumer = new KafkaConsumer<>(consumerProp);
-        consumer.subscribe(Collections.singleton(kafkaProp.getTopic()));
-    }
-
-    @Override
-    public void destroy() throws Exception {
-        if (consumer != null) {
-            consumer.close();
-        }
-    }
-}
@@ -1,24 +0,0 @@
-package com.usthe.warehouse.entrance;
-
-import com.usthe.common.entity.message.CollectRep;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.common.serialization.Deserializer;
-
-/**
- * Deserializer for MetricsData
- * @author tom
- * @date 2021/11/24 17:29
- */
-@Slf4j
-public class KafkaMetricsDataDeserializer implements Deserializer<CollectRep.MetricsData> {
-
-    @Override
-    public CollectRep.MetricsData deserialize(String topicName, byte[] bytes) {
-        try {
-            return CollectRep.MetricsData.parseFrom(bytes);
-        } catch (Exception e) {
-            log.error(e.getMessage(), e);
-        }
-        return null;
-    }
-}
@@ -1,134 +0,0 @@
-package com.usthe.warehouse.store;
-
-import com.influxdb.client.InfluxDBClient;
-import com.influxdb.client.InfluxDBClientFactory;
-import com.influxdb.client.WriteApi;
-import com.influxdb.client.WriteOptions;
-import com.influxdb.client.domain.WritePrecision;
-import com.influxdb.client.write.Point;
-import com.usthe.common.entity.message.CollectRep;
-import com.usthe.common.util.CommonConstants;
-import com.usthe.warehouse.MetricsDataQueue;
-import com.usthe.warehouse.WarehouseProperties;
-import com.usthe.warehouse.WarehouseWorkerPool;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.DisposableBean;
-import org.springframework.boot.autoconfigure.AutoConfigureAfter;
-import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
-import org.springframework.context.annotation.Configuration;
-
-import java.time.Instant;
-import java.util.List;
-
-/**
- * Store collect data in InfluxDB
- * @author tom
- * @date 2021/11/24 18:23
- */
-@Configuration
-@AutoConfigureAfter(value = {WarehouseProperties.class})
-@ConditionalOnProperty(prefix = "warehouse.store.influxdb",
-        name = "enabled", havingValue = "true", matchIfMissing = true)
-@Slf4j
-public class InfluxdbDataStorage implements DisposableBean {
-
-    private InfluxDBClient influxClient;
-    private WriteApi writeApi;
-    private WarehouseWorkerPool workerPool;
-    private MetricsDataQueue dataQueue;
-
-    public InfluxdbDataStorage (WarehouseProperties properties, WarehouseWorkerPool workerPool,
-                                MetricsDataQueue dataQueue) {
-        this.workerPool = workerPool;
-        this.dataQueue = dataQueue;
-        initInfluxDbClient(properties);
-        startStorageData();
-    }
-
-    private void startStorageData() {
-        Runnable runnable = () -> {
-            Thread.currentThread().setName("warehouse-influxdb-data-storage");
-            while (!Thread.currentThread().isInterrupted()) {
-                try {
-                    CollectRep.MetricsData metricsData = dataQueue.pollInfluxMetricsData();
-                    if (metricsData != null) {
-                        saveData(metricsData);
-                    }
-                } catch (InterruptedException e) {
-                    log.error(e.getMessage());
-                }
-            }
-        };
-        workerPool.executeJob(runnable);
-        workerPool.executeJob(runnable);
-    }
-
-    private void initInfluxDbClient(WarehouseProperties properties) {
-        if (properties == null || properties.getStore() == null || properties.getStore().getInfluxdb() == null) {
-            log.error("init error, please config Warehouse influxdb props in application.yml");
-            throw new IllegalArgumentException("please config Warehouse influxdb props");
-        }
-        WarehouseProperties.StoreProperties.InfluxdbProperties influxdbProp = properties.getStore().getInfluxdb();
-        influxClient = InfluxDBClientFactory.create(influxdbProp.getServers(), influxdbProp.getToken().toCharArray(),
-                influxdbProp.getOrg(), influxdbProp.getBucket());
-        WriteOptions writeOptions = WriteOptions.builder()
-                .batchSize(1000)
-                .bufferLimit(1000)
-                .jitterInterval(1000)
-                .retryInterval(5000)
-                .build();
-        writeApi = influxClient.makeWriteApi(writeOptions);
-    }
-
-
-    public void saveData(CollectRep.MetricsData metricsData) {
-        String measurement = metricsData.getApp() + "_" + metricsData.getMetrics();
-        String monitorId = String.valueOf(metricsData.getId());
-        Instant collectTime = Instant.ofEpochMilli(metricsData.getTime());
-
-        List<CollectRep.Field> fields = metricsData.getFieldsList();
-        for (CollectRep.ValueRow valueRow : metricsData.getValuesList()) {
-            Point point = Point.measurement(measurement)
-                    .addTag("id", monitorId)
-                    .addTag("instance", valueRow.getInstance())
-                    .time(collectTime, WritePrecision.MS);
-            for (int index = 0; index < fields.size(); index++) {
-                CollectRep.Field field = fields.get(index);
-                String value = valueRow.getColumns(index);
-                if (field.getType() == CommonConstants.TYPE_NUMBER) {
-                    // number data
-                    if (CommonConstants.NULL_VALUE.equals(value)) {
-                        point.addField(field.getName(), (Number) null);
-                    } else {
-                        try {
-                            double number = Double.parseDouble(value);
-                            point.addField(field.getName(), number);
-                        } catch (Exception e) {
-                            log.warn(e.getMessage());
-                            point.addField(field.getName(), (Number) null);
-                        }
-                    }
-                } else {
-                    // string
-                    if (CommonConstants.NULL_VALUE.equals(value)) {
-                        point.addField(field.getName(), (String) null);
-                    } else {
-                        point.addField(field.getName(), value);
-                    }
-                }
-            }
-            writeApi.writePoint(point);
-        }
-    }
-
-
-    @Override
-    public void destroy() throws Exception {
-        if (writeApi != null) {
-            writeApi.close();
-        }
-        if (influxClient != null) {
-            influxClient.close();
-        }
-    }
-}
@@ -1,7 +1,7 @@
 package com.usthe.warehouse.store;
 
+import com.usthe.collector.dispatch.export.MetricsDataExporter;
 import com.usthe.common.entity.message.CollectRep;
-import com.usthe.warehouse.MetricsDataQueue;
 import com.usthe.warehouse.WarehouseProperties;
 import com.usthe.warehouse.WarehouseWorkerPool;
 import io.lettuce.core.RedisClient;
@@ -34,13 +34,12 @@ public class RedisDataStorage implements DisposableBean {
     private RedisClient redisClient;
     private StatefulRedisConnection<String, CollectRep.MetricsData> connection;
     private WarehouseWorkerPool workerPool;
-    private MetricsDataQueue dataQueue;
+    private MetricsDataExporter dataExporter;
 
     public RedisDataStorage (WarehouseProperties properties, WarehouseWorkerPool workerPool,
-                             MetricsDataQueue dataQueue) {
+                             MetricsDataExporter dataExporter) {
         this.workerPool = workerPool;
-        this.dataQueue = dataQueue;
-
+        this.dataExporter = dataExporter;
         initRedisClient(properties);
         startStorageData();
     }
@@ -55,7 +54,7 @@ public class RedisDataStorage implements DisposableBean {
             Thread.currentThread().setName("warehouse-redis-data-storage");
             while (!Thread.currentThread().isInterrupted()) {
                 try {
-                    CollectRep.MetricsData metricsData = dataQueue.pollRedisMetricsData();
+                    CollectRep.MetricsData metricsData = dataExporter.pollWarehouseRedisMetricsData();
                     if (metricsData != null) {
                         saveData(metricsData);
                     }
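With the Kafka consumer deleted, RedisDataStorage now pulls its data from the collector's MetricsDataExporter instead of the removed in-module MetricsDataQueue. Only pollWarehouseRedisMetricsData() is visible in this diff; the sketch below assumes the exporter is a simple in-process blocking queue, and the export() method, the queue field and the 2-second poll timeout are illustrative guesses rather than the actual implementation:

    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.TimeUnit;

    import com.usthe.common.entity.message.CollectRep;

    /** Illustrative stand-in for com.usthe.collector.dispatch.export.MetricsDataExporter (registered via the collector's spring.factories). */
    public class MetricsDataExporterSketch {

        // assumed: one in-memory queue per downstream store; only the Redis path is shown
        private final LinkedBlockingQueue<CollectRep.MetricsData> redisQueue = new LinkedBlockingQueue<>();

        /** Assumed entry point, called by the collector after a metrics group has been collected. */
        public void export(CollectRep.MetricsData metricsData) {
            redisQueue.offer(metricsData);
        }

        /** Polled by RedisDataStorage's worker thread, as in the hunk above; the timeout value is a guess. */
        public CollectRep.MetricsData pollWarehouseRedisMetricsData() throws InterruptedException {
            return redisQueue.poll(2, TimeUnit.SECONDS);
        }
    }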
@@ -2,7 +2,5 @@ org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
 com.usthe.warehouse.WarehouseProperties,\
 com.usthe.warehouse.MetricsDataQueue,\
 com.usthe.warehouse.WarehouseWorkerPool,\
-com.usthe.warehouse.entrance.KafkaDataConsume,\
-com.usthe.warehouse.store.InfluxdbDataStorage,\
 com.usthe.warehouse.store.RedisDataStorage,\
 com.usthe.warehouse.controller.MetricsDataController