Commit e9d05036 authored by godkaikai

Implementation of changelog and table queries

parent 09c9f5bf
package com.dlink.utils;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.ObjectIdentifier;
......@@ -26,4 +27,8 @@ public class FlinkUtil {
return new ArrayList<String>();
}
}
public static List<String> catchColumn(TableResult tableResult){
return Arrays.asList(tableResult.getTableSchema().getFieldNames());
}
}
package com.dlink.utils;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.ObjectIdentifier;
......@@ -26,4 +27,8 @@ public class FlinkUtil {
return new ArrayList<String>();
}
}
public static List<String> catchColumn(TableResult tableResult){
return Arrays.asList(tableResult.getTableSchema().getFieldNames());
}
}
package com.dlink.utils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.Operation;
import java.util.*;
......@@ -22,8 +21,11 @@ public class FlinkUtil {
if (tableOpt.isPresent()) {
return tableOpt.get().getResolvedSchema().getColumnNames();
}else{
- return new ArrayList<String>();
+ return new ArrayList<>();
}
}
public static List<String> catchColumn(TableResult tableResult){
return tableResult.getResolvedSchema().getColumnNames();
}
}
package com.dlink.utils;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.ObjectIdentifier;
......@@ -25,4 +26,9 @@ public class FlinkUtil {
return new ArrayList<String>();
}
}
public static List<String> catchColumn(TableResult tableResult){
return tableResult.getResolvedSchema().getColumnNames();
}
}
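Across the client modules above, FlinkUtil.catchColumn does the same job in each Flink version: it returns the column names of a TableResult, via TableSchema#getFieldNames on the older clients and ResolvedSchema#getColumnNames on 1.13+. A minimal usage sketch follows, assuming an illustrative datagen table and query that are not part of this commit:

// Illustrative only: read the column names of a query result through FlinkUtil.catchColumn.
// The datagen table and the SELECT statement below are made-up examples.
import com.dlink.utils.FlinkUtil;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import java.util.List;

public class CatchColumnSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(
                EnvironmentSettings.newInstance().inStreamingMode().build());
        tEnv.executeSql("CREATE TABLE src (id INT, name STRING) WITH ('connector' = 'datagen')");
        TableResult result = tEnv.executeSql("SELECT id, name FROM src");
        List<String> columns = FlinkUtil.catchColumn(result); // [id, name]
        System.out.println(columns);
    }
}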
......@@ -268,7 +268,7 @@ public class JobManager {
job.setJobId(tableResult.getJobClient().get().getJobID().toHexString());
}
if (config.isUseResult()) {
- IResult result = ResultBuilder.build(SqlType.INSERT, maxRowNum, "", true).getResult(tableResult);
+ IResult result = ResultBuilder.build(SqlType.INSERT, maxRowNum, true).getResult(tableResult);
job.setResult(result);
}
}
......@@ -300,7 +300,7 @@ public class JobManager {
job.setJobId(tableResult.getJobClient().get().getJobID().toHexString());
}
if (config.isUseResult()) {
- IResult result = ResultBuilder.build(item.getType(), maxRowNum, "", true).getResult(tableResult);
+ IResult result = ResultBuilder.build(item.getType(), maxRowNum, true).getResult(tableResult);
job.setResult(result);
}
}
......@@ -351,7 +351,7 @@ public class JobManager {
}
LocalDateTime startTime = LocalDateTime.now();
TableResult tableResult = executor.executeSql(newStatement);
- IResult result = ResultBuilder.build(operationType, maxRowNum, "", false).getResult(tableResult);
+ IResult result = ResultBuilder.build(operationType, maxRowNum, false).getResult(tableResult);
result.setStartTime(startTime);
return result;
}
......
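The three JobManager call sites above drop the old nullColumn/printRowKind pair in favor of a single isChangeLog flag. A hedged sketch of the new call shape (the wrapper class, the SqlType import path, and the row limit are assumptions inferred from the diff, not code from this commit):

// Sketch only: choosing between a changelog view and a materialized table view of one query.
// Import locations are assumed; maxRowNum is a placeholder.
import com.dlink.result.IResult;
import com.dlink.result.ResultBuilder;
import com.dlink.parser.SqlType;
import org.apache.flink.table.api.TableResult;

public class ResultBuilderSketch {
    public static IResult fetch(TableResult tableResult, boolean asChangeLog) {
        int maxRowNum = 100; // placeholder row limit
        // true  -> ResultRunnable collects every changelog row and prepends an "op" column
        // false -> ResultRunnable keeps only the materialized latest state (retractions applied)
        return ResultBuilder.build(SqlType.SELECT, maxRowNum, asChangeLog).getResult(tableResult);
    }
}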
......@@ -11,13 +11,13 @@ import org.apache.flink.table.api.TableResult;
**/
public interface ResultBuilder {
- static ResultBuilder build(SqlType operationType, Integer maxRowNum, String nullColumn, boolean printRowKind){
+ static ResultBuilder build(SqlType operationType, Integer maxRowNum, boolean isChangeLog){
switch (operationType){
case SELECT:
- return new SelectResultBuilder(maxRowNum,nullColumn,printRowKind);
+ return new SelectResultBuilder(maxRowNum,isChangeLog);
case SHOW:
case DESCRIBE:
- return new ShowResultBuilder(nullColumn,false);
+ return new ShowResultBuilder(false);
case INSERT:
return new InsertResultBuilder();
default:
......
package com.dlink.result;
import org.apache.flink.table.api.TableColumn;
import com.dlink.constant.FlinkConstant;
import com.dlink.utils.FlinkUtil;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
import org.apache.flink.util.StringUtils;
import java.util.*;
import java.util.stream.Stream;
/**
* ResultRunnable
......@@ -18,81 +19,85 @@ public class ResultRunnable implements Runnable {
    private TableResult tableResult;
    private Integer maxRowNum;
-   private boolean printRowKind;
-   private String nullColumn;
+   private boolean isChangeLog;
+   private String nullColumn = "";

-   public ResultRunnable(TableResult tableResult, Integer maxRowNum, boolean printRowKind, String nullColumn) {
+   public ResultRunnable(TableResult tableResult, Integer maxRowNum, boolean isChangeLog) {
        this.tableResult = tableResult;
        this.maxRowNum = maxRowNum;
-       this.printRowKind = printRowKind;
-       this.nullColumn = nullColumn;
+       this.isChangeLog = isChangeLog;
    }

    @Override
    public void run() {
-       if(tableResult.getJobClient().isPresent()) {
+       if (tableResult.getJobClient().isPresent()) {
            String jobId = tableResult.getJobClient().get().getJobID().toHexString();
            if (!ResultPool.containsKey(jobId)) {
-               ResultPool.put(new SelectResult(jobId, new ArrayList<Map<String, Object>>(), new LinkedHashSet<String>()));
+               ResultPool.put(new SelectResult(jobId, new ArrayList<>(), new LinkedHashSet<>()));
            }
            try {
-               catchData(ResultPool.get(jobId));
-           }catch (Exception e){
+               if(isChangeLog) {
+                   catchChangLog(ResultPool.get(jobId));
+               }else{
+                   catchData(ResultPool.get(jobId));
+               }
+           } catch (Exception e) {
            }
        }
    }

-   private void catchData(SelectResult selectResult){
-       List<TableColumn> columns = tableResult.getTableSchema().getTableColumns();
-       String[] columnNames = columns.stream().map(TableColumn::getName).map(s -> s.replace(" ", "")).toArray((x$0) -> {
-           return (new String[x$0]);
-       });
-       if (printRowKind) {
-           columnNames = Stream.concat(Stream.of("op"), Arrays.stream(columnNames)).toArray((x$0) -> {
-               return new String[x$0];
-           });
-       }
-       Set<String> column = new LinkedHashSet(Arrays.asList(columnNames));
-       selectResult.setColumns(column);
-       long numRows = 0L;
-       List<Map<String, Object>> rows = selectResult.getRowData();
-       Iterator<Row> it = tableResult.collect();
-       while (it.hasNext()) {
-           if (numRows < maxRowNum) {
-               String[] cols = rowToString(it.next());
-               Map<String, Object> row = new HashMap<>();
-               for (int i = 0; i < cols.length; i++) {
-                   if (i > columnNames.length) {
-                       /*column.add("UKN" + i);
-                       row.put("UKN" + i, cols[i]);*/
-                   } else {
-                       // column.add(columnNames[i]);
-                       row.put(columnNames[i], cols[i]);
-                   }
-               }
-               rows.add(row);
-               numRows++;
-           } else {
-               break;
-           }
-       }
-   }

+   private void catchChangLog(SelectResult selectResult) {
+       List<String> columns = FlinkUtil.catchColumn(tableResult);
+       columns.add(0, FlinkConstant.OP);
+       Set<String> column = new LinkedHashSet(columns);
+       selectResult.setColumns(column);
+       List<Map<String, Object>> rows = selectResult.getRowData();
+       Iterator<Row> it = tableResult.collect();
+       while (it.hasNext()) {
+           if (rows.size() >= maxRowNum) {
+               break;
+           }
+           Map<String, Object> map = new LinkedHashMap<>();
+           Row row = it.next();
+           map.put(columns.get(0), row.getKind().shortString());
+           for (int i = 0; i < row.getArity(); ++i) {
+               Object field = row.getField(i);
+               if (field == null) {
+                   map.put(columns.get(i+1), nullColumn);
+               } else {
+                   map.put(columns.get(i+1), StringUtils.arrayAwareToString(field));
+               }
+           }
+           rows.add(map);
+       }
+   }

-   private String[] rowToString(Row row) {
-       int len = printRowKind ? row.getArity() + 1 : row.getArity();
-       List<String> fields = new ArrayList(len);
-       if (printRowKind) {
-           fields.add(row.getKind().shortString());
-       }
-       for (int i = 0; i < row.getArity(); ++i) {
-           Object field = row.getField(i);
-           if (field == null) {
-               fields.add(nullColumn);
-           } else {
-               fields.add(StringUtils.arrayAwareToString(field));
-           }
-       }
-       return fields.toArray(new String[0]);
-   }

+   private void catchData(SelectResult selectResult) {
+       List<String> columns = FlinkUtil.catchColumn(tableResult);
+       Set<String> column = new LinkedHashSet(columns);
+       selectResult.setColumns(column);
+       List<Map<String, Object>> rows = selectResult.getRowData();
+       Iterator<Row> it = tableResult.collect();
+       while (it.hasNext()) {
+           if (rows.size() >= maxRowNum) {
+               break;
+           }
+           Map<String, Object> map = new LinkedHashMap<>();
+           Row row = it.next();
+           for (int i = 0; i < row.getArity(); ++i) {
+               Object field = row.getField(i);
+               if (field == null) {
+                   map.put(columns.get(i), nullColumn);
+               } else {
+                   map.put(columns.get(i), StringUtils.arrayAwareToString(field));
+               }
+           }
+           if (RowKind.UPDATE_BEFORE == row.getKind() || RowKind.DELETE == row.getKind()) {
+               rows.remove(map);
+           }else {
+               rows.add(map);
+           }
+       }
+   }
}
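catchChangLog stores every collected row together with its RowKind short string under the op column, whereas catchData maintains a materialized view: an UPDATE_BEFORE or DELETE row retracts the previously collected map, anything else is appended. A standalone sketch of that retraction rule on made-up data (String.valueOf stands in for StringUtils.arrayAwareToString):

// Sketch of the materialization rule used by catchData, on an invented changelog.
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ChangelogMaterializeSketch {
    public static void main(String[] args) {
        // Made-up changelog: (1,a) is updated to (1,b); (2,c) is inserted then deleted.
        List<Row> changelog = Arrays.asList(
                Row.ofKind(RowKind.INSERT, 1, "a"),
                Row.ofKind(RowKind.UPDATE_BEFORE, 1, "a"),
                Row.ofKind(RowKind.UPDATE_AFTER, 1, "b"),
                Row.ofKind(RowKind.INSERT, 2, "c"),
                Row.ofKind(RowKind.DELETE, 2, "c"));
        List<String> columns = Arrays.asList("id", "name");
        List<Map<String, Object>> rows = new ArrayList<>();
        for (Row row : changelog) {
            Map<String, Object> map = new LinkedHashMap<>();
            for (int i = 0; i < row.getArity(); i++) {
                map.put(columns.get(i), String.valueOf(row.getField(i)));
            }
            if (RowKind.UPDATE_BEFORE == row.getKind() || RowKind.DELETE == row.getKind()) {
                rows.remove(map); // retract the previously collected version of this row
            } else {
                rows.add(map);    // INSERT / UPDATE_AFTER append the current version
            }
        }
        System.out.println(rows); // [{id=1, name=b}]
    }
}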
......@@ -18,20 +18,18 @@ import java.util.stream.Stream;
public class SelectResultBuilder implements ResultBuilder {
private Integer maxRowNum;
- private boolean printRowKind;
- private String nullColumn;
+ private boolean isChangeLog;
- public SelectResultBuilder(Integer maxRowNum, String nullColumn, boolean printRowKind) {
+ public SelectResultBuilder(Integer maxRowNum, boolean isChangeLog) {
this.maxRowNum = maxRowNum;
- this.printRowKind = printRowKind;
- this.nullColumn = nullColumn;
+ this.isChangeLog = isChangeLog;
}
@Override
public IResult getResult(TableResult tableResult) {
if (tableResult.getJobClient().isPresent()) {
String jobId = tableResult.getJobClient().get().getJobID().toHexString();
- ResultRunnable runnable = new ResultRunnable(tableResult, maxRowNum, printRowKind, nullColumn);
+ ResultRunnable runnable = new ResultRunnable(tableResult, maxRowNum, isChangeLog);
Thread thread = new Thread(runnable, jobId);
thread.start();
return SelectResult.buildSuccess(jobId);
......
......@@ -18,11 +18,10 @@ import java.util.stream.Stream;
public class ShowResultBuilder implements ResultBuilder {
private boolean printRowKind;
- private String nullColumn;
+ private String nullColumn = "";
- public ShowResultBuilder(String nullColumn, boolean printRowKind) {
+ public ShowResultBuilder(boolean printRowKind) {
this.printRowKind = printRowKind;
- this.nullColumn = nullColumn;
}
@Override
......
......@@ -24,4 +24,8 @@ public interface FlinkConstant {
* local mode host
*/
String LOCAL_HOST = "localhost:8081";
/**
* changelog op
*/
String OP = "op";
}
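The new OP constant is the column name under which catchChangLog stores the row kind. For reference, the values written there are the RowKind short strings, as this small sketch prints:

// Prints the op values that appear in the changelog result: +I, -U, +U, -D.
import org.apache.flink.types.RowKind;

public class OpValuesSketch {
    public static void main(String[] args) {
        for (RowKind kind : RowKind.values()) {
            System.out.println(kind + " -> " + kind.shortString());
        }
    }
}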