Commit 0d7ac85f authored by yutao

init

parent e12a962d
@@ -39,102 +39,113 @@
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
<version>0.293</version>
</dependency>
<!-- Hive JDBC driver -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<version>3.2.2</version>
</dependency>
<!-- &lt;!&ndash; Hive JDBC driver &ndash;&gt;-->
<!-- <dependency>-->
<!-- <groupId>org.apache.hive</groupId>-->
<!-- <artifactId>hive-jdbc</artifactId>-->
<!-- <version>3.1.2</version>-->
<!-- <exclusions>-->
<!-- <exclusion>-->
<!-- <groupId>org.slf4j</groupId>-->
<!-- <artifactId>slf4j-log4j12</artifactId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <groupId>log4j</groupId>-->
<!-- <artifactId>log4j</artifactId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>log4j-1.2-api</artifactId>-->
<!-- <groupId>org.apache.logging.log4j</groupId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>log4j-slf4j-impl</artifactId>-->
<!-- <groupId>org.apache.logging.log4j</groupId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>log4j-web</artifactId>-->
<!-- <groupId>org.apache.logging.log4j</groupId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>commons-logging</artifactId>-->
<!-- <groupId>commons-logging</groupId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>jdk.tools</artifactId>-->
<!-- <groupId>jdk.tools</groupId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <groupId>org.eclipse.jetty.aggregate</groupId>-->
<!-- <artifactId>*</artifactId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <groupId>org.eclipse.jetty</groupId>-->
<!-- <artifactId>jetty-runner</artifactId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>servlet-api</artifactId>-->
<!-- <groupId>javax.servlet</groupId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>javax.servlet.jsp-api</artifactId>-->
<!-- <groupId>javax.servlet.jsp</groupId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>javax.servlet.jsp</artifactId>-->
<!-- <groupId>org.glassfish.web</groupId>-->
<!-- </exclusion>-->
<!-- <exclusion>-->
<!-- <artifactId>junit</artifactId>-->
<!-- <groupId>junit</groupId>-->
<!-- </exclusion>-->
<!-- </exclusions>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.apache.hadoop</groupId>-->
<!-- <artifactId>hadoop-common</artifactId>-->
<!-- <version>3.3.4</version>-->
<!-- <exclusions>-->
<!-- <exclusion>-->
<!-- <artifactId>commons-logging</artifactId>-->
<!-- <groupId>commons-logging</groupId>-->
<!-- </exclusion>-->
<!-- </exclusions>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.apache.hadoop</groupId>-->
<!-- <artifactId>hadoop-hdfs</artifactId>-->
<!-- <version>3.3.4</version>-->
<!-- <exclusions>-->
<!-- <exclusion>-->
<!-- <artifactId>commons-logging</artifactId>-->
<!-- <groupId>commons-logging</groupId>-->
<!-- </exclusion>-->
<!-- </exclusions>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.apache.hadoop</groupId>-->
<!-- <artifactId>hadoop-client</artifactId>-->
<!-- <version>3.3.4</version>-->
<!-- </dependency>-->
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
......
package com.ediagnosis.cdr.cache;
import java.util.Optional;
public interface CacheExecutor {
void put(String key, Object value);
<T> Optional<T> get(String key, Class<T> clazz);
}
package com.ediagnosis.cdr.cache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Optional;
// todo: to be extended; cache according to the on-demand caching strategy
@Component
public class CacheFacade {
private static final Logger log = LoggerFactory.getLogger(CacheFacade.class);
private final List<CacheExecutor> cacheExecutors;
public CacheFacade(List<CacheExecutor> cacheExecutors) {
this.cacheExecutors = cacheExecutors;
CacheStrategyHolder.initCacheStrategy();
}
public <T> Optional<T> getCache(String key, Class<T> clazz) {
if (cacheExecutors.isEmpty()) {
log.warn("no cache executor available");
return Optional.empty();
}
for (CacheExecutor executor : cacheExecutors) {
Optional<T> optional = executor.get(key, clazz);
if (optional.isPresent()) {
return optional;
}
}
return Optional.empty();
}
public void putCache(String key, String value) {
if (cacheExecutors.isEmpty()) {
log.warn("no cache executor available");
return;
}
for (CacheExecutor executor : cacheExecutors) {
executor.put(key, value);
}
}
}
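For illustration only, a minimal sketch of how a caller might use CacheFacade as a read-through cache; the caller class, its method, and fetchFromSource are hypothetical and not part of this commit. The value is serialized with the existing JsonProcessor because putCache currently takes a String value.

package com.ediagnosis.cdr.cache;

import com.ediagnosis.cdr.context.JsonProcessor;
import java.util.Map;
import java.util.Optional;

// Hypothetical caller, shown only to illustrate the intended read-through flow:
// ask every registered CacheExecutor first, fall back to the source, then write back.
public class CacheFacadeUsageSketch {

    private final CacheFacade cacheFacade;

    public CacheFacadeUsageSketch(CacheFacade cacheFacade) {
        this.cacheFacade = cacheFacade;
    }

    public String loadSummaryJson(String key) {
        // 1. probe the executors in their @Order sequence
        Optional<String> cached = cacheFacade.getCache(key, String.class);
        if (cached.isPresent()) {
            return cached.get();
        }
        // 2. cache miss: fetch the value (placeholder lookup), serialize it, write it back
        Object fresh = fetchFromSource(key);
        String json = JsonProcessor.toJson(fresh).orElse("{}");
        cacheFacade.putCache(key, json);
        return json;
    }

    private Object fetchFromSource(String key) {
        return Map.of("key", key); // stand-in for a real repository or SQL lookup
    }
}

On a hit the first executor in @Order sequence wins; on a miss the written value is pushed to every executor, which matches how getCache and putCache iterate above.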
package com.ediagnosis.cdr.cache;
import com.ediagnosis.cdr.context.JsonProcessor;
import java.util.Map;
import java.util.Optional;
public class CacheStrategyHolder {
private static Map cacheStrategy;
public static void initCacheStrategy() {
String CACHE_STRATEGY_PATH = "classpath:cache-strategy.json";
Optional<Map> optionalMap = JsonProcessor.readJsonFileToObject(CACHE_STRATEGY_PATH, Map.class);
optionalMap.ifPresent(map -> cacheStrategy = map);
}
public static Object getStrategy(String key) {
// guard against a missing or unreadable cache-strategy.json
return cacheStrategy == null ? null : cacheStrategy.get(key);
}
}
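A hedged sketch of how CacheStrategyHolder might be consulted; the wrapper class below is hypothetical, but the "fullTableTree" and "expiration" keys match the cache-strategy.json added at the end of this commit.

package com.ediagnosis.cdr.cache;

import java.util.Map;

// Hypothetical usage of CacheStrategyHolder, for illustration only.
public class CacheStrategySketch {

    public static String fullTableTreeExpiration() {
        CacheStrategyHolder.initCacheStrategy();                        // loads classpath:cache-strategy.json
        Object node = CacheStrategyHolder.getStrategy("fullTableTree");
        if (node instanceof Map<?, ?> settings) {
            Object expiration = settings.get("expiration");
            return expiration == null ? null : expiration.toString();  // "3000" in the committed file
        }
        return null;
    }
}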
package com.ediagnosis.cdr.cache;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
@Component
@Order(1)
public class CaffeineCacheExecutor implements CacheExecutor {
private static final Logger log = LoggerFactory.getLogger(CaffeineCacheExecutor.class);
private final Cache<String, Object> cache;
public CaffeineCacheExecutor() {
this.cache = Caffeine.newBuilder()
.expireAfterWrite(3, TimeUnit.MINUTES)
.maximumSize(10_000)
.build();
}
@Override
public void put(String key, Object value) {
log.info("put cache key:{}", key);
cache.put(key, value);
}
@Override
public <T> Optional<T> get(String key, Class<T> clazz) {
Object object = cache.getIfPresent(key);
if (object != null) {
if (clazz.isInstance(object)) {
return Optional.of(clazz.cast(object));
} else {
// type mismatch: log a warning and return empty
log.warn("Cached object type mismatch for key '{}': expected {}, got {}",
key, clazz.getName(), object.getClass().getName());
return Optional.empty();
}
}
log.warn("Cache miss for key '{}'", key);
return Optional.empty();
}
}
@@ -4,7 +4,10 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
import java.io.File;
import java.io.IOException;
import java.util.Optional;
@@ -15,24 +18,66 @@ public class JsonProcessor {
public static <T> Optional<T> toObject(String json, Class<T> clazz) {
T value;
try {
value = objectMapper.readValue(json, clazz);
} catch (Exception e) {
log.error("json to object error", e);
return Optional.empty();
}
return Optional.ofNullable(value);
}
public static Optional<String> toJson(Object object) {
String value;
try {
value = objectMapper.writeValueAsString(object);
} catch (JsonProcessingException e) {
log.error("object to json error", e);
return Optional.empty();
}
return Optional.ofNullable(value);
}
public static void print(Object object) {
toJson(object).ifPresent(System.out::println);
}
public static <T> Optional<T> readJsonFileToObject(String path, Class<T> clazz) {
T value;
if (path.startsWith("classpath:")) {
path = path.substring("classpath:".length());
ClassPathResource resource = new ClassPathResource(path);
if (!resource.exists()) {
log.error("classpath resource not exists: {}", path);
return Optional.empty();
}
try {
value = objectMapper.readValue(resource.getInputStream(), clazz);
} catch (IOException e) {
log.error("read json file error", e);
return Optional.empty();
}
} else {
File file = new File(path);
if (!file.exists()) {
log.error("file not exists: {}", path);
return Optional.empty();
}
try {
value = objectMapper.readValue(file, clazz);
} catch (IOException e) {
log.error("read json file error", e);
return Optional.empty();
}
}
return Optional.ofNullable(value);
}
}
package com.ediagnosis.cdr.context.value;
import java.util.List;
......
package com.ediagnosis.cdr.context.value;
import java.io.Serializable;
......
package com.ediagnosis.cdr.dao;
import com.ediagnosis.cdr.context.value.Page;
import com.mybatisflex.core.row.Row;
import org.springframework.stereotype.Repository;
......
package com.ediagnosis.cdr.dao;
import com.ediagnosis.cdr.dao.entity.IndicatorEntity;
import com.ediagnosis.cdr.dao.mapper.IndicatorMapper;
import com.ediagnosis.cdr.context.TimeUtil;
import com.ediagnosis.cdr.dataGovernance.value.Indicator;
import org.springframework.stereotype.Repository;
import java.time.LocalDateTime;
import java.util.Optional;
@Repository
public class IndicatorRepository {
private final IndicatorMapper indicatorMapper;
public IndicatorRepository(IndicatorMapper indicatorMapper) {
this.indicatorMapper = indicatorMapper;
}
private Optional<Indicator> convertToIndicator(IndicatorEntity entity) {
Optional<LocalDateTime> timeOptional = TimeUtil.toLocalDateTime(entity.getUpdateTime());
return timeOptional.map(localDateTime -> new Indicator(
entity.getName(),
entity.getCode(),
entity.getValue(),
localDateTime
));
}
......
@@ -9,8 +9,8 @@ import com.mybatisflex.annotation.Table;
* Overview of the total data volume
* @TableName dim_data_summary
*/
@Table(value ="indicator")
public class IndicatorEntity {
/**
*
*/
@@ -32,6 +32,8 @@ public class DimDataSummaryEntity {
*/
private String value;
private String description;
/**
* Update time
*/
@@ -93,6 +95,15 @@
this.value = value;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
/**
* Update time
*/
......
package com.ediagnosis.cdr.dao.mapper;
import com.ediagnosis.cdr.dao.entity.IndicatorEntity;
import com.mybatisflex.core.BaseMapper;
import org.mybatis.spring.annotation.MapperScan;
@MapperScan
public interface IndicatorMapper extends BaseMapper<IndicatorEntity> {
}
......
package com.ediagnosis.cdr.dashBoard;
import com.ediagnosis.cdr.context.value.Response;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RestController
@RequestMapping("/host")
public class DashBoardController {
private final HostMonitorValueRepository hostMonitorValueRepository;
public DashBoardController(HostMonitorValueRepository hostMonitorValueRepository) {
this.hostMonitorValueRepository = hostMonitorValueRepository;
}
@GetMapping("/list")
public Response<List<HostMonitorValue.HostValue>> list() {
HostMonitorValue.HostValue hostValue = hostMonitorValueRepository.getHostValue();
return Response.success(List.of(hostValue));
}
@PostMapping("/monitor")
public Response<List<HostMonitorValue>> monitor(String hostName) {
HostMonitorValue hostMonitorValue = hostMonitorValueRepository.getHostMonitorValue(hostName);
return Response.success(List.of(hostMonitorValue));
}
}
package com.ediagnosis.cdr.dashBoard;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class HostMonitor {
private static final Logger log = LoggerFactory.getLogger(HostMonitor.class);
public static Optional<Map<String, String>> getHostIpAndName() {
String hostname = "";
String ip = "";
Optional<String> optionalHostname = executeCommand("hostname");
Optional<String> optionalIp = executeCommand("hostname", "-I");
if (optionalHostname.isPresent()) {
hostname = optionalHostname.get();
}
if (optionalIp.isPresent()) {
ip = optionalIp.get();
}
Map<String, String> host = Map.of("hostName", hostname,
"ip", ip);
log.info("host info: " + host);
return Optional.of(host);
}
/**
* Executes a system command and returns the first line of its output.
*/
private static Optional<String> executeCommand(String... command) {
ProcessBuilder pb = new ProcessBuilder(command);
pb.redirectErrorStream(true); // merge stderr into stdout
Optional<String> optionalString = Optional.empty();
log.info("executing command: " + String.join(" ", command));
Process process;
try {
process = pb.start();
} catch (IOException e) {
// log the failure and return empty
log.error("failed to execute command: " + String.join(" ", command), e);
return optionalString;
}
// try-with-resources closes the stream automatically
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(process.getInputStream()))) {
String line = reader.readLine(); // only the first line is needed
optionalString = Optional.ofNullable(line); // readLine may return null if the command produced no output
} catch (IOException e) {
// log the failure and return whatever was captured so far
log.error("failed to read command output: " + String.join(" ", command), e);
return optionalString;
}
return optionalString;
}
public static Optional<Map<String, String>> getCpuUsage() {
Optional<Map<String, String>> optionalMap = Optional.empty();
ProcessBuilder pb = new ProcessBuilder("top", "-bn1");
log.info("executing command: top -bn1");
Process process = null;
try {
process = pb.start();
} catch (IOException e) {
log.error("failed to start process: top -bn1", e);
return optionalMap;
}
String cpuLine = "";
String loadLine = "";
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
String line;
while ((line = reader.readLine()) != null) {
if (line.contains("load average:")) {
loadLine = line;
} else if (line.contains("Cpu(s)")) {
cpuLine = line;
}
if (!cpuLine.isBlank() && !loadLine.isBlank()) {
break;
}
}
} catch (IOException e) {
log.error("failed to read CPU info from: top -bn1", e);
return optionalMap;
}
// strip the "%Cpu(s):" prefix and split out the numeric fields
String[] parts = cpuLine
.replace("%Cpu(s):", "")
.trim()
.split(",");
// holds the parsed idle percentage
double id = 0;
for (String part : parts) {
part = part.trim();
if (part.endsWith("id")) {
id = Double.parseDouble(part.replace("id", "").trim());
}
}
String cpuUsage = Math.round(100 - id) + "%";
String cpuCores = String.valueOf(
Runtime.getRuntime().availableProcessors()
);
String loadAverage = "";
// regex for "load average: x, y, z"
Pattern pattern = Pattern.compile("load average: ([\\d.]+), ([\\d.]+), ([\\d.]+)");
Matcher matcher = pattern.matcher(loadLine);
if (matcher.find()) {
double fiveMin = Double.parseDouble(matcher.group(2));
loadAverage = String.valueOf(fiveMin);
}
// method 1: usage = 100 - idle
Map<String, String> cpu = Map.of(
"cpuUsage", cpuUsage,
"cpuCores", cpuCores,
"loadAverage", loadAverage
);
log.info("CPU info: " + cpu);
return Optional.of(cpu);
}
public static Optional<Map<String, String>> getMemoryUsage() {
Optional<Map<String, String>> optionalMap = Optional.empty();
ProcessBuilder pb = new ProcessBuilder("free", "-h");
log.info("executing command: free -h");
Process process = null;
try {
process = pb.start();
} catch (IOException e) {
log.error("failed to start process: free -h", e);
return optionalMap;
}
String memLine = "";
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
String line;
while ((line = reader.readLine()) != null) {
if (line.startsWith("Mem:")) {
memLine = line;
}
}
} catch (IOException e) {
log.error("failed to read memory info from: free -h", e);
return optionalMap;
}
// collapse runs of whitespace so the line splits cleanly
String[] parts = memLine.trim().replaceAll("\\s+", " ").split(" ");
// parse the fields reported by free -h (units: K, M or G)
String totalStr = parts[1];
String availableStr = parts[6];
// normalize to GB
double total = parseSizeToGB(totalStr);
double available = parseSizeToGB(availableStr);
// memory usage = (1 - available / total) * 100%
double usagePercent = (1 - (available / total)) * 100;
String memoryUsage = Math.round(usagePercent) + "%";
String used = (total - available) + "G";
String size = total + "G";
String free = available + "G";
Map<String, String> memory = Map.of(
"used", used,
"size", size,
"free", free,
"memoryUsage", memoryUsage
);
log.info("memory info: " + memory);
return Optional.of(memory);
}
private static double parseSizeToGB(String size) {
double value;
if (size.endsWith("G")) {
value = Double.parseDouble(size.replace("G", ""));
} else if (size.endsWith("M")) {
value = Double.parseDouble(size.replace("M", "")) / 1024;
} else if (size.endsWith("K")) {
value = Double.parseDouble(size.replace("K", "")) / (1024 * 1024);
} else {
value = Double.parseDouble(size) / (1024 * 1024 * 1024);
}
return value;
}
public static Optional<Map<String, String>> getDiskUsage() {
ProcessBuilder pb = new ProcessBuilder("df", "-h", "/");
Optional<Map<String, String>> optionalMap = Optional.empty();
log.info("executing command: df -h /");
Process process = null;
try {
process = pb.start();
} catch (IOException e) {
log.error("failed to start process: df -h /", e);
return optionalMap;
}
String body;
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
reader.readLine(); // skip the header line of the df output
body = reader.readLine();
} catch (IOException e) {
log.error("failed to read disk info from: df -h /", e);
return optionalMap;
}
if (body == null) {
log.error("no output from: df -h /");
return optionalMap;
}
String[] parts = body.trim().replaceAll("\\s+", " ").split(" ");
// make sure this is a valid data row (at least 6 columns, with a percentage field)
if (parts.length < 6 || parts[4].indexOf('%') == -1) {
log.error("unexpected df output line: {}", body);
return optionalMap;
}
// extract the Use% field (e.g. "69%")
String usePercent = parts[4];
String size = parts[1];
String free = parts[3];
String used = parts[2];
// assemble the disk metrics
Map<String, String> disk = Map.of(
"usePercent", usePercent,
"size", size,
"free", free,
"used", used);
log.info("disk info: " + disk);
return Optional.of(disk);
}
}
package com.ediagnosis.cdr.dashBoard;
public record HostMonitorValue(
HostValue host,
CpuValue cpu,
MemoryValue memory,
DiskValue disk
) {
public record HostValue(
String hostName,
String hostIp
){
}
public record CpuValue(
int coreNum,
double fiveMinLoad,
String usage
){
}
public record MemoryValue(
String usage,
String size,
String used,
String free
){
}
public record DiskValue(
String usage,
String size,
String used,
String free
){
}
}
package com.ediagnosis.cdr.dashBoard;
import org.springframework.stereotype.Component;
import java.util.Map;
import java.util.Optional;
@Component
public class HostMonitorValueRepository {
// todo: to be extended; fetch the list of cluster hosts
public HostMonitorValue.HostValue getHostValue() {
Optional<Map<String, String>> hostIpAndNameOptional = HostMonitor.getHostIpAndName();
String hostname = "";
String ip = "";
if (hostIpAndNameOptional.isPresent()) {
Map<String, String> hostMap = hostIpAndNameOptional.get();
hostname = hostMap.get("hostName");
ip = hostMap.get("ip");
}
return new HostMonitorValue.HostValue(hostname, ip);
}
// todo: to be extended; fetch monitoring info for the specified host
public HostMonitorValue getHostMonitorValue(String hostName) {
Optional<Map<String, String>> hostIpAndNameOptional = HostMonitor.getHostIpAndName();
HostMonitorValue.HostValue hostValue;
if (hostIpAndNameOptional.isPresent()) {
Map<String, String> hostMap = hostIpAndNameOptional.get();
String hostNameInner = hostMap.get("hostName");
String ip = hostMap.get("ip");
hostValue = new HostMonitorValue.HostValue(hostNameInner, ip);
}else {
hostValue = new HostMonitorValue.HostValue("", "");
}
Optional<Map<String, String>> cpuUsageOptional = HostMonitor.getCpuUsage();
HostMonitorValue.CpuValue cpuValue;
if (cpuUsageOptional.isPresent()) {
Map<String, String> cpuMap = cpuUsageOptional.get();
String cpuUsage = cpuMap.get("cpuUsage");
String cpuCores = cpuMap.get("cpuCores");
int cpuCoresNum = Integer.parseInt(cpuCores);
String loadAverage = cpuMap.get("loadAverage");
double loadAverageNum = Double.parseDouble(loadAverage);
cpuValue = new HostMonitorValue.CpuValue(cpuCoresNum, loadAverageNum, cpuUsage);
} else {
cpuValue = new HostMonitorValue.CpuValue(0, 0, "");
}
Optional<Map<String, String>> memoryUsageOptional = HostMonitor.getMemoryUsage();
HostMonitorValue.MemoryValue memoryValue;
if (memoryUsageOptional.isPresent()) {
Map<String, String> memoryMap = memoryUsageOptional.get();
String used = memoryMap.get("used");
String size = memoryMap.get("size");
String free = memoryMap.get("free");
String memoryUsage = memoryMap.get("memoryUsage");
memoryValue = new HostMonitorValue.MemoryValue(memoryUsage, size, used, free);
} else {
memoryValue = new HostMonitorValue.MemoryValue("", "", "", "");
}
Optional<Map<String, String>> diskUsageOptional = HostMonitor.getDiskUsage();
HostMonitorValue.DiskValue diskValue;
if (diskUsageOptional.isPresent()) {
Map<String, String> diskMap = diskUsageOptional.get();
String usePercent = diskMap.get("usePercent");
String used = diskMap.get("used");
String size = diskMap.get("size");
String free = diskMap.get("free");
diskValue = new HostMonitorValue.DiskValue(usePercent, size, used, free);
} else {
diskValue = new HostMonitorValue.DiskValue("", "", "", "");
}
HostMonitorValue hostMonitorValue = new HostMonitorValue(hostValue, cpuValue, memoryValue, diskValue);
return hostMonitorValue;
}
}
package com.ediagnosis.cdr.dataGovernance;
import com.ediagnosis.cdr.cache.CacheFacade;
import com.ediagnosis.cdr.dataGovernance.value.TableDefinitionItem;
import com.ediagnosis.cdr.dataGovernance.value.TableTree;
import org.springframework.stereotype.Component;
import java.util.List;
@Component
public class CacheableTableDefinitionRepository implements TableDefinitionRepository{
private final DefaultTableDefinitionRepository defaultTableDefinitionRepository;
private final CacheFacade cacheFacade;
public CacheableTableDefinitionRepository(DefaultTableDefinitionRepository defaultTableDefinitionRepository,
CacheFacade cacheFacade) {
this.defaultTableDefinitionRepository = defaultTableDefinitionRepository;
this.cacheFacade = cacheFacade;
}
@Override
public TableTree getTableTree(String keyword) {
// return the cached tree when any executor holds it; otherwise fall back to the default repository
return cacheFacade.getCache(keyword, TableTree.class)
.orElseGet(() -> defaultTableDefinitionRepository.getTableTree(keyword));
}
@Override
public List<TableDefinitionItem> getTableDefinitionItem(String tableId) {
return List.of();
}
}
package com.ediagnosis.cdr.dataGovernance;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class DataGovernanceController {
......
package com.ediagnosis.cdr.dataGovernance;
import com.ediagnosis.cdr.dataGovernance.value.TableDefinitionItem;
import com.ediagnosis.cdr.dataGovernance.value.TableTree;
import org.springframework.stereotype.Component;
import java.util.List;
@Component
public class DefaultTableDefinitionRepository implements TableDefinitionRepository {
@Override
public TableTree getTableTree(String keyword) {
return null;
}
@Override
public List<TableDefinitionItem> getTableDefinitionItem(String tableId) {
return List.of();
}
}
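DefaultTableDefinitionRepository above is still a stub (it returns null and an empty list). As a sketch only, getTableDefinitionItem could be filled in with the DESCRIBE-over-Presto lookup that TableDefinitionTest exercises further down; the class below is hypothetical and assumes the ds-presto data source from application.yml and a fully qualified table name.

package com.ediagnosis.cdr.dataGovernance;

import com.ediagnosis.cdr.dataGovernance.value.TableDefinitionItem;
import com.mybatisflex.core.datasource.DataSourceKey;
import com.mybatisflex.core.row.Db;
import com.mybatisflex.core.row.Row;
import java.util.List;

// Hypothetical sketch, not the committed implementation.
public class PrestoTableDefinitionSketch {

    public List<TableDefinitionItem> describe(String qualifiedTableName) {
        List<Row> rows;
        try {
            // switch MyBatis-Flex to the ds-presto data source for this lookup
            DataSourceKey.use("ds-presto");
            rows = Db.selectListBySql("DESCRIBE " + qualifiedTableName);
        } finally {
            DataSourceKey.clear();
        }
        // map each DESCRIBE row onto the TableDefinitionItem value object
        return rows.stream()
                .map(row -> new TableDefinitionItem(
                        row.getString("Column"),
                        row.getString("Type"),
                        row.getString("Comment")))
                .toList();
    }
}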
package com.ediagnosis.cdr.dataGovernance;
import com.ediagnosis.cdr.dataGovernance.value.TableDefinitionItem;
import com.ediagnosis.cdr.dataGovernance.value.TableTree;
import java.util.List;
public interface TableDefinitionRepository {
/**
* Search the table tree by keyword.
* @param keyword the search keyword
*/
TableTree getTableTree(String keyword);
List<TableDefinitionItem> getTableDefinitionItem(String tableId);
}
package com.ediagnosis.cdr.dataGovernance.request;
public record SourceTableRequest(
) {
}
package com.ediagnosis.cdr.dataGovernance.request;
public record TableFieldRequest(
Integer pageNo,
Integer pageSize,
Integer tableId
) {
}
package com.ediagnosis.cdr.dataGovernance.value;
import java.time.LocalDateTime;
public record Indicator(
String name,
String code,
String value,
......
package com.ediagnosis.cdr.dataGovernance.value;
public record TableDefinitionItem(
String column,
String type,
String comment
) {
}
package com.ediagnosis.cdr.dataGovernance.value;
import java.util.LinkedList;
import java.util.List;
public class TableTree {
public static class TreeNode {
private final String id;
private final String code;
private final List<TreeNode> children = new LinkedList<>();
private final boolean tableNodeFlag;
public TreeNode(String id, String code, boolean tableNodeFlag) {
this.id = id;
this.code = code;
this.tableNodeFlag = tableNodeFlag;
}
public TreeNode appendChild(String id, String code,boolean tableNodeFlag) {
TreeNode node = new TreeNode(id,code,tableNodeFlag);
this.children.add(node);
return node;
}
public String getId() {
return id;
}
public String getCode() {
return code;
}
public boolean isTableNodeFlag() {
return tableNodeFlag;
}
public List<TreeNode> getChildren() {
return children;
}
}
private List<TreeNode> rootNodes;
public static TableTree create() {
TableTree tableTree = new TableTree();
tableTree.rootNodes = new LinkedList<>();
return tableTree;
}
public TableTree ofRoot(TreeNode... node) {
rootNodes.addAll(List.of(node));
return this;
}
public TableTree ofChild(TreeNode parent, TreeNode... childNode) {
parent.children.addAll(List.of(childNode));
return this;
}
public TreeNode addRoot(TreeNode node) {
rootNodes.add(node);
return node;
}
public List<TreeNode> getRootNodes() {
return rootNodes;
}
public void setRootNodes(List<TreeNode> rootNodes) {
this.rootNodes = rootNodes;
}
}
@@ -8,5 +8,8 @@ mybatis-flex:
    ds-hive:
      url: jdbc:hive2://10.11.4.21:10000
      username: root
    ds-presto:
      url: jdbc:presto://10.11.4.21:8999
      username: root
server:
  port: 7001
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.ediagnosis.cdr.dao.mapper.DimDataSummaryMapper">
<resultMap id="BaseResultMap" type="com.ediagnosis.cdr.dao.entity.DimDataSummaryEntity">
<id property="id" column="id" />
<result property="name" column="name" />
<result property="code" column="code" />
<result property="value" column="value" />
<result property="updateTime" column="update_time" />
</resultMap>
<sql id="Base_Column_List">
id,name,code,value,update_time
</sql>
</mapper>
<configuration>
<!-- Log file location -->
<property name="LOG_PATH" value="logs"/>
<property name="LOG_FILE" value="app-log"/>
<!-- Console appender -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- File appender -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/${LOG_FILE}.log</file>
<!-- Rolling policy: based on time and file size -->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- Rolled file name pattern: one file per day, with an index -->
<!-- %d{yyyy-MM-dd} splits the logs by day -->
<!-- %i is the chunk index used when a single file gets too large -->
<fileNamePattern>${LOG_PATH}/archive/${LOG_FILE}-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<!-- Maximum size of a single file; larger files are split -->
<maxFileSize>100MB</maxFileSize>
<!-- Total size of archived files to retain -->
<!-- Optional: cap the total storage used -->
<!-- <totalSizeCap>10GB</totalSizeCap> -->
<!-- Keep at most this many days of logs -->
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- Set the root log level to INFO and choose which appenders receive output -->
<root level="INFO">
<appender-ref ref="STDOUT" />
<!-- <appender-ref ref="FILE" />-->
</root>
</configuration>
\ No newline at end of file
@@ -5,6 +5,8 @@ import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.util.Assert;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@@ -30,4 +32,23 @@ class JsonProcessorTest {
Assert.isTrue(json.get().equals("{\"name\":\"张三\",\"age\":18}"), "conversion result does not match the expected value");
}
@Test
void readJsonFileToObject() {
String path = "classpath:cache-strategy.json";
Optional<Map> optionalMap = JsonProcessor.readJsonFileToObject(path, Map.class);
Assert.isTrue(optionalMap.isPresent(), "conversion result should not be empty");
System.out.println(optionalMap);
Map map = optionalMap.get();
Object tableTree = map.get("fullTableTree");
if (tableTree instanceof Map tableTreeMap) {
Object expiration = tableTreeMap.get("expiration");
if (expiration instanceof String s) {
System.out.println(s);
}
}
}
}
\ No newline at end of file
@@ -10,7 +10,7 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
@SpringBootTest
public class AsyncTest {
......
package com.ediagnosis.cdr.dataGovernance;
import com.mybatisflex.core.datasource.DataSourceKey;
import com.mybatisflex.core.row.Db;
import com.mybatisflex.core.row.Row;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import java.util.ArrayList;
import java.util.List;
@SpringBootTest
public class TableDefinitionTest {
@Test
public void testPresto() {
try{
DataSourceKey.use("ds-presto");
String sql = "select count(*) from hive.ods.xjd_patient_info";
List<Row> rows = Db.selectListBySql(sql);
System.out.println(rows.size());
System.out.println(rows);
}finally{
DataSourceKey.clear();
}
}
@Test
public void testTableDefinition() {
List<Row> rows=null;
try{
DataSourceKey.use("ds-presto");
String sql = "DESCRIBE hive.ods.xjd_patient_info";
rows = Db.selectListBySql(sql);
}finally{
DataSourceKey.clear();
}
List<TestTableDefinitionItem> items = new ArrayList<>(rows.size());
for (Row row : rows) {
TestTableDefinitionItem item = new TestTableDefinitionItem();
item.setColumn(row.getString("Column"));
item.setType(row.getString("Type"));
item.setComment(row.getString("Comment"));
System.out.println(item);
items.add(item);
}
System.out.println(rows.size());
System.out.println(items.size());
}
}
package com.ediagnosis.cdr.dataGovernance;
import com.ediagnosis.cdr.context.JsonProcessor;
import com.ediagnosis.cdr.dataGovernance.value.TableTree;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
public class TableTreeTest {
@Test
public void testTableTree() {
TableTree tableTree = TableTree.create();
tableTree.addRoot(new TableTree.TreeNode("1","ip-xxxx", false))
.appendChild("2","dataBase-xxxx",false)
.appendChild("3","table-xxx",true);
tableTree.addRoot(new TableTree.TreeNode("4","ip-yyyy", false))
.appendChild("5","catalog-yyyy",false)
.appendChild("6","schema-yyyy", true)
.appendChild("7","table-yyyy", true);
tableTree.addRoot(new TableTree.TreeNode("8","ip-zzzz",false ))
.appendChild("9","table-zzzz",true);
JsonProcessor.print(tableTree);
}
}
package com.ediagnosis.cdr.dataGovernance;
public class TestTableDefinitionItem {
private String column;
private String type;
private String comment;
public String getColumn() {
return column;
}
public void setColumn(String column) {
this.column = column;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment;
}
@Override
public String toString() {
return "TestTableDefinitionItem{" +
"column='" + column + '\'' +
", type='" + type + '\'' +
", comment='" + comment + '\'' +
'}';
}
}
{
"fullTableTree": {
"expiration": "3000",
"otherProperties": "xxxx"
}
}
\ No newline at end of file