Unverified commit a387db97 authored by 高岩, committed by GitHub

Feature: support metadata cache in memory or Redis (#1051)

* Fix entity class deserialization error

* Add metadata caching, which supports either an in-memory or a Redis backend

* Add a time field to the Result return value

* Redesign the metadata UI and support caching

* Fix merge issues

* Add a scroll bar when the number of databases exceeds seven

* Add Apache license headers

Co-authored-by: steve <woai1998>
parent 973ee343
......@@ -127,10 +127,10 @@
<artifactId>sa-token-spring-boot-starter</artifactId>
</dependency>
<!-- sa-token persistence -->
<!-- <dependency>-->
<!-- <groupId>org.springframework.boot</groupId>-->
<!-- <artifactId>spring-boot-starter-data-redis</artifactId>-->
<!-- </dependency>-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<!-- <dependency>-->
<!-- <groupId>cn.dev33</groupId>-->
<!-- <artifactId>sa-token-dao-redis-jackson</artifactId>-->
......
......@@ -21,6 +21,7 @@ package com.dlink;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
......@@ -31,6 +32,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
*/
@EnableTransactionManagement
@SpringBootApplication
@EnableCaching
public class Dlink {
public static void main(String[] args) {
......
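@EnableCaching activates Spring's annotation-driven caching for the whole application; the controller changes further down depend on it. A rough sketch of the pattern for reference only (not part of this commit; ExampleService and its method names are made up):

import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

@Service
public class ExampleService {

    // First call for a given id computes the value; later calls with the
    // same id are served from the "example" cache.
    @Cacheable(cacheNames = "example", key = "#id")
    public String loadExpensiveValue(Integer id) {
        return "value-" + id;
    }

    // Drops the cached entry so the next call recomputes it.
    @CacheEvict(cacheNames = "example", key = "#id")
    public void evict(Integer id) {
    }
}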
......@@ -23,6 +23,7 @@ import com.dlink.model.CodeEnum;
import java.io.Serializable;
import cn.hutool.core.date.DateTime;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
......@@ -41,6 +42,7 @@ public class Result<T> implements Serializable {
private T datas;
private Integer code;
private String msg;
private String time;
public static <T> Result<T> succeed(String msg) {
return of(null, CodeEnum.SUCCESS.getCode(), msg);
......@@ -55,7 +57,7 @@ public class Result<T> implements Serializable {
}
public static <T> Result<T> of(T datas, Integer code, String msg) {
return new Result<>(datas, code, msg);
return new Result<>(datas, code, msg,new DateTime().toString());
}
public static <T> Result<T> failed(String msg) {
......
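Because Result is built with Lombok's @AllArgsConstructor, the generated constructor now takes four arguments (datas, code, msg, time). A small sketch of the effect, assuming the usual Lombok-generated getters; the code value below is arbitrary and the timestamp format is hutool DateTime's default:

// Sketch, not from the commit: every Result built via of() is stamped with the current time.
Result<String> r = Result.of("payload", 0, "ok");
System.out.println(r.getTime());   // e.g. "2022-09-24 11:23:00"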
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.dlink.configure;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
/**
* CacheConfigure
*
* @author ikiler
* @since 2022/09/24 11:23
*/
@Configuration
public class CacheConfigure {
/**
* Configure the value serialization used by Redis cache annotations
*/
@Bean
public RedisCacheConfiguration cacheConfiguration() {
return RedisCacheConfiguration.defaultCacheConfig()
//serialize values as JSON
.serializeValuesWith(
RedisSerializationContext.SerializationPair.fromSerializer(RedisSerializer.json())
)
.serializeKeysWith(RedisSerializationContext.SerializationPair.fromSerializer(new StringRedisSerializer()));
}
// /**
// * Configure the serialization used by RedisTemplate
// */
// @Bean
// public RedisTemplate redisTemplate(RedisConnectionFactory factory) {
// RedisTemplate redisTemplate = new RedisTemplate();
// redisTemplate.setConnectionFactory(factory);
// // key serialization: string
// redisTemplate.setKeySerializer(RedisSerializer.string());
// // value serialization: JSON
// redisTemplate.setValueSerializer(RedisSerializer.json());
// return redisTemplate;
// }
}
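If the commented-out RedisTemplate bean above were enabled, a self-contained version might look like the sketch below (a guess at the intended wiring, not part of this commit), keeping keys as plain strings and values as JSON:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.RedisSerializer;

@Configuration
public class RedisTemplateConfigureSketch {   // hypothetical class name, for illustration

    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(factory);
        template.setKeySerializer(RedisSerializer.string());   // keys serialized as strings
        template.setValueSerializer(RedisSerializer.json());   // values serialized as JSON
        return template;
    }
}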
......@@ -35,6 +35,8 @@ import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
......@@ -167,11 +169,21 @@ public class DataBaseController {
/**
* Get the schemas and tables of a metadata source
*/
@Cacheable(cacheNames = "metadata_schema",key = "#id")
@GetMapping("/getSchemasAndTables")
public Result getSchemasAndTables(@RequestParam Integer id) {
return Result.succeed(databaseService.getSchemasAndTables(id), "获取成功");
}
/**
* Clear the cached metadata schemas and tables
*/
@CacheEvict(cacheNames = "metadata_schema",key = "#id")
@GetMapping("/unCacheSchemasAndTables")
public Result unCacheSchemasAndTables(@RequestParam Integer id) {
return Result.succeed("clear cache", "success");
}
/**
* Get the columns of a specified metadata table
*/
......
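The unCacheSchemasAndTables endpoint works purely through @CacheEvict; the same effect can be achieved programmatically via Spring's CacheManager, which makes the annotation's behavior explicit (hypothetical helper, not part of this commit):

import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;
import org.springframework.stereotype.Component;

@Component
public class MetadataCacheHelper {   // hypothetical name, for illustration

    private final CacheManager cacheManager;

    public MetadataCacheHelper(CacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    // Does the same thing as @CacheEvict(cacheNames = "metadata_schema", key = "#id")
    public void evictSchemasAndTables(Integer id) {
        Cache cache = cacheManager.getCache("metadata_schema");
        if (cache != null) {
            cache.evict(id);
        }
    }
}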
......@@ -11,6 +11,20 @@ spring:
matching-strategy: ant_path_matcher
main:
allow-circular-references: true
# Metadata is cached in memory by default.
# dlink also supports Redis caching: if needed, change 'simple' to 'redis' and enable the Redis connection settings below.
# The sub-options can be enabled or customized as required.
cache:
type: simple
## If type is set to redis, the following sub-options can be configured as needed
# redis:
## Whether to cache null values; keeping the default is fine
# cache-null-values: false
## Cache time-to-live, 24 hours
# time-to-live: 86400
# flyway:
# enabled: false
# clean-disabled: true
......
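Which CacheManager Spring Boot wires up follows spring.cache.type; a quick way to confirm the active backend at startup is a sketch like the one below (hypothetical component, not part of this commit):

import org.springframework.boot.CommandLineRunner;
import org.springframework.cache.CacheManager;
import org.springframework.stereotype.Component;

@Component
public class CacheBackendLogger implements CommandLineRunner {   // hypothetical name, for illustration

    private final CacheManager cacheManager;

    public CacheBackendLogger(CacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    @Override
    public void run(String... args) {
        // With type=simple this typically prints ConcurrentMapCacheManager,
        // with type=redis it typically prints RedisCacheManager.
        System.out.println("Active cache manager: " + cacheManager.getClass().getSimpleName());
    }
}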
......@@ -45,6 +45,12 @@ public class Schema implements Serializable, Comparable<Schema> {
private List<String> userFunctions = new ArrayList<>();
private List<String> modules = new ArrayList<>();
/**
* A no-args constructor must be kept here, otherwise (de)serialization fails
* */
public Schema() {
}
public Schema(String name) {
this.name = name;
}
......
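The no-args constructor matters because the JSON value serializer configured for the cache (RedisSerializer.json() is Jackson-based) instantiates Schema reflectively when reading entries back. A minimal round-trip sketch, assuming Jackson on the classpath and the usual getters on Schema (illustration only, not part of this commit):

import com.fasterxml.jackson.databind.ObjectMapper;

public class SchemaRoundTripSketch {   // hypothetical class, for illustration

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = mapper.writeValueAsString(new Schema("test_db"));
        // Without the empty constructor (and without a @JsonCreator),
        // readValue cannot instantiate Schema and deserialization fails.
        Schema restored = mapper.readValue(json, Schema.class);
        System.out.println(mapper.writeValueAsString(restored));
    }
}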
......@@ -22,6 +22,7 @@ package com.dlink.model;
import com.dlink.assertion.Asserts;
import com.dlink.utils.SqlUtil;
import java.beans.Transient;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
......@@ -75,10 +76,12 @@ public class Table implements Serializable, Comparable<Table> {
this.columns = columns;
}
@Transient
public String getSchemaTableName() {
return Asserts.isNullString(schema) ? name : schema + "." + name;
}
@Transient
public String getSchemaTableNameWithUnderline() {
return Asserts.isNullString(schema) ? name : schema + "_" + name;
}
......@@ -100,6 +103,7 @@ public class Table implements Serializable, Comparable<Table> {
return new Table(name, schema, columns);
}
@Transient
public String getFlinkTableWith(String flinkConfig) {
String tableWithSql = "";
if (Asserts.isNotNullString(flinkConfig)) {
......@@ -109,10 +113,12 @@ public class Table implements Serializable, Comparable<Table> {
return tableWithSql;
}
@Transient
public String getFlinkTableSql(String flinkConfig) {
return getFlinkDDL(flinkConfig, name);
}
@Transient
public String getFlinkDDL(String flinkConfig, String tableName) {
StringBuilder sb = new StringBuilder();
sb.append("CREATE TABLE IF NOT EXISTS " + tableName + " (\n");
......@@ -162,6 +168,7 @@ public class Table implements Serializable, Comparable<Table> {
return sb.toString();
}
@Transient
public String getFlinkTableSql(String catalogName, String flinkConfig) {
StringBuilder sb = new StringBuilder("DROP TABLE IF EXISTS ");
String fullSchemaName = catalogName + "." + schema + "." + name;
......@@ -213,6 +220,7 @@ public class Table implements Serializable, Comparable<Table> {
return sb.toString();
}
@Transient
public String getSqlSelect(String catalogName) {
StringBuilder sb = new StringBuilder("SELECT\n");
for (int i = 0; i < columns.size(); i++) {
......@@ -239,6 +247,7 @@ public class Table implements Serializable, Comparable<Table> {
return sb.toString();
}
@Transient
public String getCDCSqlInsert(String targetName, String sourceName) {
StringBuilder sb = new StringBuilder("INSERT INTO ");
sb.append(targetName);
......
......@@ -222,6 +222,11 @@ export function showAlertGroup(dispatch: any) {
export function showMetaDataTable(id: number) {
return getData('api/database/getSchemasAndTables', {id: id});
}
/*--- Clear the metadata table cache ---*/
export function clearMetaDataTable(id: number) {
return getData('api/database/unCacheSchemasAndTables', {id: id});
}
/*--- Refresh sample data for a table ---*/
export function showTableData(id: number,schemaName:String,tableName:String,option:{}) {
return postAll('api/database/queryData', {id: id,schemaName:schemaName,tableName:tableName,option:option});
......
......@@ -32,6 +32,8 @@ export interface TreeDataNode extends DataNode {
taskId:number;
parentId:number;
path:string[];
schema:string;
table:string;
}
export function convertToTreeData(data:TreeDataNode[], pid:number,path?:string[]) {
......
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
.margin_10{
margin: 10px;
......
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
//main container styles
.container {
margin-top: 5px;
background: #ffffff;
height: 95vh;
padding: 8px
}
//styles for the parent component of the top database list, overriding the default Segmented styles
.headerBarContent {
overflow-x: scroll;
width: 100%;
background-color:#fafafa;
:global{
.ant-segmented:not(.ant-segmented-disabled):hover, .ant-segmented:not(.ant-segmented-disabled):focus{
background-color:#fafafa;
}
}
}
//top database list styles
.headerBar {
background: #fafafa;
padding: 8px;
}
//style for a single card in the database list
.headerCard {
width:200px;
:global{
.ant-card-body{
padding: 8px;
}
}
}
//table header styles
.tableListHead{
background-color: #f9f9f9;
padding: 8px;
:global{
.ant-card-meta-title{
font-size: 14px;
}
.ant-card-meta-description{
color: rgba(0, 0, 0, 0.45);
font-size: 10px;
.content-height{
height: 100%;
}
}
}