sourceColumnMap = buildSourceColumnMap(inputSet, transform);
+
+ matcher = pattern.matcher(finalTransform);
+ String temp;
+ while (matcher.find()) {
+ temp = matcher.group();
+ // getOrDefault falls back to the literal placeholder when no mapping exists, avoiding an NPE
+ finalTransform = finalTransform.replace(temp, sourceColumnMap.getOrDefault(temp, temp));
+ }
+ // temporary special treatment: strip the "_UTF-16LE" character-set suffix Calcite renders on literals
+ finalTransform = finalTransform.replace("_UTF-16LE", "");
+ LOG.debug("final transform: {}", finalTransform);
+ return finalTransform;
+ }
+
+ /**
+ * Generate index numbers that follow the order in which inputSet was produced.
+ *
+ * For example, for ROW_NUMBER() OVER (PARTITION BY $0 ORDER BY $3 DESC NULLS LAST),
+ * the order of inputSet is $3, $0 rather than the $0, $3 obtained by scanning the
+ * expression from left to right.
+ */
+ private Map<String, String> buildSourceColumnMap(Set<RelColumnOrigin> inputSet, Object transform) {
+ Set<Object> traversalSet = new LinkedHashSet<>();
+ if (transform instanceof AggregateCall) {
+ AggregateCall call = ((AggregateCall) transform);
+ traversalSet.addAll(call.getArgList());
+ } else if (transform instanceof RexNode) {
+ RexNode rexNode = (RexNode) transform;
+ RexVisitor<Void> visitor = new RexVisitorImpl<Void>(true) {
+
+ @Override
+ public Void visitInputRef(RexInputRef inputRef) {
+ traversalSet.add(inputRef.getIndex());
+ return null;
+ }
+
+ @Override
+ public Void visitPatternFieldRef(RexPatternFieldRef fieldRef) {
+ traversalSet.add(fieldRef.getIndex());
+ return null;
+ }
+
+ @Override
+ public Void visitFieldAccess(RexFieldAccess fieldAccess) {
+ traversalSet.add(fieldAccess.toString().replace("$", ""));
+ return null;
+ }
+ };
+ rexNode.accept(visitor);
+ }
+ Map<String, String> sourceColumnMap = new HashMap<>(INITIAL_CAPACITY);
+ Iterator<String> iterator = optimizeSourceColumnSet(inputSet).iterator();
+ // guard against the optimized set collapsing duplicate names, which would exhaust the iterator
+ traversalSet.forEach(index -> {
+ if (iterator.hasNext()) {
+ sourceColumnMap.put("$" + index, iterator.next());
+ }
+ });
+ LOG.debug("sourceColumnMap: {}", sourceColumnMap);
+ return sourceColumnMap;
+ }
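The ordering subtlety above is easy to miss: the map must be zipped in traversal order, not ascending index order. A standalone sketch of the substitution (JDK only; column names are illustrative):

    import java.util.*;
    import java.util.regex.*;

    public class SourceColumnMapDemo {
        public static void main(String[] args) {
            // Traversal order as the RexVisitor produces it: $3 is visited before $0.
            Set<Integer> traversalSet = new LinkedHashSet<>(Arrays.asList(3, 0));
            Iterator<String> columns = Arrays.asList("order_time", "user_id").iterator();

            Map<String, String> sourceColumnMap = new HashMap<>();
            traversalSet.forEach(i -> sourceColumnMap.put("$" + i, columns.next()));
            // {$3=order_time, $0=user_id}

            String transform = "ROW_NUMBER() OVER (PARTITION BY $0 ORDER BY $3 DESC NULLS LAST)";
            Matcher m = Pattern.compile("\\$\\d+").matcher(transform);
            while (m.find()) {
                String ref = m.group();
                transform = transform.replace(ref, sourceColumnMap.getOrDefault(ref, ref));
            }
            // ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY order_time DESC NULLS LAST)
            System.out.println(transform);
        }
    }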
+
+ /**
+ * Improve the readability of the transform.
+ * If the catalog, database and table are all the same, return only the field.
+ * If the catalog and database are the same, return table.field.
+ * If only the catalog is the same, return database.table.field.
+ * Otherwise, return the fully qualified name.
+ */
+ private Set<String> optimizeSourceColumnSet(Set<RelColumnOrigin> inputSet) {
+ Set<String> catalogSet = new HashSet<>();
+ Set<String> databaseSet = new HashSet<>();
+ Set<String> tableSet = new HashSet<>();
+ Set<Object> qualifiedSet = new LinkedHashSet<>();
+ for (RelColumnOrigin rco : inputSet) {
+ RelOptTable originTable = rco.getOriginTable();
+ if (originTable == NULL_REL_OPT_TABLE) {
+ qualifiedSet.add(rco);
+ continue;
+ }
+ List<String> qualifiedName = originTable.getQualifiedName();
+ // catalog, database, table, field
+ List<String> qualifiedList = new ArrayList<>(qualifiedName);
+ catalogSet.add(qualifiedName.get(0));
+ databaseSet.add(qualifiedName.get(1));
+ tableSet.add(qualifiedName.get(2));
+
+ String field = rco instanceof LineageRelColumnOrigin
+ ? ((LineageRelColumnOrigin) rco).getTransform()
+ : originTable.getRowType().getFieldNames().get(rco.getOriginColumnOrdinal());
+ qualifiedList.add(field);
+ qualifiedSet.add(qualifiedList);
+ }
+
+ if (catalogSet.size() == 1 && databaseSet.size() == 1 && tableSet.size() == 1) {
+ return optimizeName(qualifiedSet, e -> e.get(3));
+ } else if (catalogSet.size() == 1 && databaseSet.size() == 1) {
+ return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(2, 4)));
+ } else if (catalogSet.size() == 1) {
+ return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(1, 4)));
+ } else {
+ return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e));
+ }
+ }
+
+ private Set<String> optimizeName(Set<Object> qualifiedSet, Function<List<String>, String> mapper) {
+ return qualifiedSet.stream().map(o -> o instanceof LineageRelColumnOrigin
+ ? ((LineageRelColumnOrigin) o).getTransform()
+ : mapper.apply((List<String>) o)).collect(Collectors.toCollection(LinkedHashSet::new));
+ }
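To make the shortening rules concrete, a minimal sketch (JDK only; names are illustrative) of the table.field case, where only catalog and database are shared:

    import java.util.*;
    import java.util.function.Function;

    public class QualifiedNameDemo {
        static final String DELIMITER = ".";

        public static void main(String[] args) {
            // catalog, database, table, field
            List<String> a = Arrays.asList("hive", "ods", "orders", "user_id");
            List<String> b = Arrays.asList("hive", "ods", "users", "id");

            // Shared catalog and database, different tables -> keep table.field.
            Function<List<String>, String> mapper = e -> String.join(DELIMITER, e.subList(2, 4));
            System.out.println(mapper.apply(a)); // orders.user_id
            System.out.println(mapper.apply(b)); // users.id
        }
    }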
+
+ private Set<RelColumnOrigin> getMultipleColumns(RexNode rexNode, RelNode input, final RelMetadataQuery mq) {
+ final Set<RelColumnOrigin> set = new LinkedHashSet<>();
+ final RexVisitor<Void> visitor = new RexVisitorImpl<Void>(true) {
+
+ @Override
+ public Void visitInputRef(RexInputRef inputRef) {
+ Set<RelColumnOrigin> inputSet = mq.getColumnOrigins(input, inputRef.getIndex());
+ if (inputSet != null) {
+ set.addAll(inputSet);
+ }
+ return null;
+ }
+ };
+ rexNode.accept(visitor);
+ return set;
+ }
+}
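getMultipleColumns feeds Calcite's column-origin metadata; for orientation, a hedged sketch of how a caller might query the same metadata directly (assumes an already-optimized RelNode; Calcite returns null when an origin cannot be determined):

    import java.util.Set;

    import org.apache.calcite.rel.RelNode;
    import org.apache.calcite.rel.metadata.RelColumnOrigin;
    import org.apache.calcite.rel.metadata.RelMetadataQuery;

    public class ColumnOriginDemo {
        // For "SELECT a + b FROM t", output field 0 originates from both t.a and t.b.
        static void printOrigins(RelNode relNode, int index) {
            RelMetadataQuery mq = relNode.getCluster().getMetadataQuery();
            Set<RelColumnOrigin> origins = mq.getColumnOrigins(relNode, index);
            if (origins == null) {
                return; // origin could not be determined
            }
            for (RelColumnOrigin origin : origins) {
                System.out.println(String.join(".", origin.getOriginTable().getQualifiedName())
                        + " -> ordinal " + origin.getOriginColumnOrdinal());
            }
        }
    }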
diff --git a/dinky-admin/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java b/dinky-admin/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java
new file mode 100644
index 0000000000..631fcf3101
--- /dev/null
+++ b/dinky-admin/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.delegation;
+
+import org.apache.flink.table.api.TableException;
+import org.apache.flink.table.api.ValidationException;
+import org.apache.flink.table.catalog.CatalogManager;
+import org.apache.flink.table.catalog.UnresolvedIdentifier;
+import org.apache.flink.table.delegation.Parser;
+import org.apache.flink.table.expressions.ResolvedExpression;
+import org.apache.flink.table.operations.ModifyOperation;
+import org.apache.flink.table.operations.Operation;
+import org.apache.flink.table.planner.calcite.FlinkPlannerImpl;
+import org.apache.flink.table.planner.calcite.FlinkTypeFactory;
+import org.apache.flink.table.planner.calcite.RexFactory;
+import org.apache.flink.table.planner.calcite.SqlToRexConverter;
+import org.apache.flink.table.planner.expressions.RexNodeExpression;
+import org.apache.flink.table.planner.operations.SqlNodeToOperationConversion;
+import org.apache.flink.table.planner.parse.CalciteParser;
+import org.apache.flink.table.planner.parse.ExtendedParser;
+import org.apache.flink.table.types.logical.LogicalType;
+import org.apache.flink.table.types.logical.RowType;
+import org.apache.flink.table.types.utils.TypeConversions;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlNodeList;
+import org.apache.calcite.sql.advise.SqlAdvisor;
+import org.apache.calcite.sql.advise.SqlAdvisorValidator;
+import org.apache.lineage.flink.sql.metadata.LineageHandler;
+
+import javax.annotation.Nullable;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+/** Implementation of {@link Parser} that uses Calcite. */
+public class ParserImpl implements Parser {
+
+ private final CatalogManager catalogManager;
+
+ // We use the supplier pattern here in order to use the most up-to-date
+ // configuration: users might change the parser configuration in a TableConfig
+ // between parsing multiple statements.
+ private final Supplier<FlinkPlannerImpl> validatorSupplier;
+ private final Supplier<CalciteParser> calciteParserSupplier;
+ private final RexFactory rexFactory;
+ private static final ExtendedParser EXTENDED_PARSER = ExtendedParser.INSTANCE;
+
+ public ParserImpl(
+ CatalogManager catalogManager,
+ Supplier<FlinkPlannerImpl> validatorSupplier,
+ Supplier<CalciteParser> calciteParserSupplier,
+ RexFactory rexFactory) {
+ this.catalogManager = catalogManager;
+ this.validatorSupplier = validatorSupplier;
+ this.calciteParserSupplier = calciteParserSupplier;
+ this.rexFactory = rexFactory;
+ }
+
+ /**
+ * When parsing statement, it first uses {@link ExtendedParser} to parse statements. If {@link
+ * ExtendedParser} fails to parse statement, it uses the {@link CalciteParser} to parse
+ * statements.
+ *
+ * @param statement input statement.
+ * @return parsed operations.
+ */
+ @Override
+ public List<Operation> parse(String statement) {
+ CalciteParser parser = calciteParserSupplier.get();
+ FlinkPlannerImpl planner = validatorSupplier.get();
+
+ Optional<Operation> command = EXTENDED_PARSER.parse(statement);
+ if (command.isPresent()) {
+ return Collections.singletonList(command.get());
+ }
+
+ // parse the sql query
+ // use parseSqlList here because we need to support statement end with ';' in sql client.
+ SqlNodeList sqlNodeList = parser.parseSqlList(statement);
+ List<SqlNode> parsed = sqlNodeList.getList();
+ Preconditions.checkArgument(parsed.size() == 1, "only single statement supported");
+ Operation operation = SqlNodeToOperationConversion.convert(planner, catalogManager, parsed.get(0))
+ .orElseThrow(() -> new TableException("Unsupported query: " + statement));
+ if (operation instanceof ModifyOperation) {
+ LineageHandler.analyze(this.catalogManager, operation, statement);
+ }
+ return Collections.singletonList(operation);
+ }
+
+ @Override
+ public UnresolvedIdentifier parseIdentifier(String identifier) {
+ CalciteParser parser = calciteParserSupplier.get();
+ SqlIdentifier sqlIdentifier = parser.parseIdentifier(identifier);
+ return UnresolvedIdentifier.of(sqlIdentifier.names);
+ }
+
+ @Override
+ public ResolvedExpression parseSqlExpression(
+ String sqlExpression, RowType inputRowType, @Nullable LogicalType outputType) {
+ try {
+ final SqlToRexConverter sqlToRexConverter =
+ rexFactory.createSqlToRexConverter(inputRowType, outputType);
+ final RexNode rexNode = sqlToRexConverter.convertToRexNode(sqlExpression);
+ final LogicalType logicalType = FlinkTypeFactory.toLogicalType(rexNode.getType());
+ // expand expression for serializable expression strings similar to views
+ final String sqlExpressionExpanded = sqlToRexConverter.expand(sqlExpression);
+ return new RexNodeExpression(
+ rexNode,
+ TypeConversions.fromLogicalToDataType(logicalType),
+ sqlExpression,
+ sqlExpressionExpanded);
+ } catch (Throwable t) {
+ throw new ValidationException(
+ String.format("Invalid SQL expression: %s", sqlExpression), t);
+ }
+ }
+
+ public String[] getCompletionHints(String statement, int cursor) {
+ List<String> candidates =
+ new ArrayList<>(
+ Arrays.asList(EXTENDED_PARSER.getCompletionHints(statement, cursor)));
+
+ // use sql advisor
+ SqlAdvisorValidator validator = validatorSupplier.get().getSqlAdvisorValidator();
+ SqlAdvisor advisor =
+ new SqlAdvisor(validator, validatorSupplier.get().config().getParserConfig());
+ String[] replaced = new String[1];
+
+ List<String> sqlHints =
+ advisor.getCompletionHints(statement, cursor, replaced).stream()
+ .map(item -> item.toIdentifier().toString())
+ .collect(Collectors.toList());
+
+ candidates.addAll(sqlHints);
+
+ return candidates.toArray(new String[0]);
+ }
+
+ public CatalogManager getCatalogManager() {
+ return catalogManager;
+ }
+}
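Because this class shadows Flink's own ParserImpl, lineage analysis piggybacks on normal statement execution. A hedged usage sketch (connectors and table names are illustrative; assumes this jar precedes flink-table-planner on the classpath so the shadowed parse() is used):

    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.TableEnvironment;

    public class LineageParseDemo {
        public static void main(String[] args) {
            TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
            env.executeSql("CREATE TABLE src (id INT, name STRING) WITH ('connector' = 'datagen')");
            env.executeSql("CREATE TABLE dst (id INT, name STRING) WITH ('connector' = 'blackhole')");
            // Parsing this INSERT yields a SinkModifyOperation, so parse() above calls
            // LineageHandler.analyze(...) before returning the operation.
            env.executeSql("INSERT INTO dst SELECT id, UPPER(name) FROM src");
        }
    }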
diff --git a/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/Constant.java b/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/Constant.java
new file mode 100644
index 0000000000..972442ce80
--- /dev/null
+++ b/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/Constant.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lineage.flink.sql.metadata;
+
+public class Constant {
+
+ private Constant() {
+ throw new IllegalStateException("Utility class");
+ }
+
+ public static final String DELIMITER = ".";
+
+ public static final int INITIAL_CAPACITY = 16;
+
+ public static final String ILLEGAL_PARAM = "illegal param";
+
+ public static final Long DEFAULT_USER_ID = 0L;
+}
diff --git a/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/LineageHandler.java b/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/LineageHandler.java
new file mode 100644
index 0000000000..8753c1f5f6
--- /dev/null
+++ b/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/LineageHandler.java
@@ -0,0 +1,185 @@
+package org.apache.lineage.flink.sql.metadata;
+
+import io.openlineage.client.Clients;
+import io.openlineage.client.OpenLineage;
+import io.openlineage.client.OpenLineage.*;
+import io.openlineage.client.OpenLineageClient;
+import io.openlineage.client.utils.UUIDUtils;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.metadata.RelColumnOrigin;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.table.api.ValidationException;
+import org.apache.flink.table.api.internal.TableEnvironmentInternal;
+import org.apache.flink.table.catalog.*;
+import org.apache.flink.table.operations.*;
+import org.apache.flink.table.operations.ddl.CreateTableOperation;
+import org.apache.flink.table.planner.operations.PlannerQueryOperation;
+import org.apache.flink.table.planner.plan.schema.TableSourceTable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.util.*;
+import java.util.stream.Collectors;
+
+import static org.apache.lineage.flink.sql.metadata.Constant.DELIMITER;
+
+/**
+ * Author: lwjhn
+ * Date: 2024/9/23 16:27
+ * Description:
+ */
+public class LineageHandler {
+ protected static final Logger LOG = LoggerFactory.getLogger(LineageHandler.class);
+ protected static final OpenLineage openLineage;
+ protected static OpenLineageClient openLineageClient = null;
+
+ public static OpenLineageClient getClient() {
+ return openLineageClient;
+ }
+
+ public static OpenLineage getOpenLineage() {
+ return openLineage;
+ }
+
+ public static void init() {
+ try {
+ openLineageClient = Clients.newClient();
+ } catch (Exception exception) {
+ LOG.error("Failed to create the OpenLineage client.", exception);
+ }
+ }
+
+ static {
+ openLineage = new OpenLineage(URI.create("https://github.com/apache/flink"));
+ init();
+ }
+
+ public static void analyze(TableEnvironmentInternal internal, Operation operation, String statement) {
+ analyze(internal.getCatalogManager(), operation, statement);
+ }
+
+ public static void analyze(CatalogManager catalogManager, Operation operation, String statement) {
+ if (openLineageClient == null) {
+ return;
+ }
+
+ if (operation instanceof ModifyOperation) {
+ ContextResolvedTable contextResolvedTable = null;
+ if (operation instanceof SinkModifyOperation) {
+ contextResolvedTable = ((SinkModifyOperation) operation).getContextResolvedTable();
+ } else if (operation instanceof CreateTableASOperation) {
+ CreateTableOperation ctOperation = ((CreateTableASOperation) operation).getCreateTableOperation();
+ contextResolvedTable = ContextResolvedTable.permanent(
+ ctOperation.getTableIdentifier(),
+ catalogManager.getCatalog(ctOperation.getTableIdentifier().getCatalogName()).orElse(null),
+ catalogManager.resolveCatalogBaseTable(ctOperation.getCatalogTable()));
+ } else if (operation instanceof ExternalModifyOperation) {
+ contextResolvedTable = ((ExternalModifyOperation) operation).getContextResolvedTable();
+ }
+ if (contextResolvedTable != null) {
+ build(contextResolvedTable,
+ ((PlannerQueryOperation) ((ModifyOperation) operation).getChild()).getCalciteTree(),
+ statement);
+ }
+ }
+ }
+
+ protected static void build(ContextResolvedTable contextResolvedTable, RelNode optRelNode, String statement) {
+ ObjectIdentifier sinkTable = contextResolvedTable.getIdentifier();
+ String sinkSummary = sinkTable.asSummaryString();
+ ResolvedSchema resolvedSchema = contextResolvedTable.getResolvedSchema();
+
+ // target columns
+ List<Column> targetColumnList = resolvedSchema.getColumns();
+ // check the size of query and sink fields match
+ validateSchema(sinkSummary, optRelNode, targetColumnList);
+
+ List<SchemaDatasetFacetFields> sinkFields = new ArrayList<>();
+ Map<String, RelOptTable> sourceDataset = new LinkedHashMap<>();
+ ColumnLineageDatasetFacetFieldsBuilder columnLineageFields = new ColumnLineageDatasetFacetFieldsBuilder();
+
+ RelMetadataQuery metadataQuery = optRelNode.getCluster().getMetadataQuery();
+ for (int index = 0; index < targetColumnList.size(); index++) {
+ Column column = targetColumnList.get(index);
+ String targetColumn = column.getName();
+
+ LOG.debug("**********************************************************");
+ LOG.debug("==> Target table: {}, column: {}, type: {}, description: {}", sinkSummary, targetColumn, column.getDataType().toString(), column.getComment().orElse(null));
+ sinkFields.add(openLineage.newSchemaDatasetFacetFieldsBuilder()
+ .name(targetColumn)
+ .type(column.getDataType().toString())
+ .description(column.getComment().orElse(null))
+ .build());
+ List<ColumnLineageDatasetFacetFieldsAdditionalInputFields> additionalInputFields = new ArrayList<>();
+ columnLineageFields.put(targetColumn,
+ new ColumnLineageDatasetFacetFieldsAdditionalBuilder().inputFields(additionalInputFields).build());
+
+ Set<RelColumnOrigin> relColumnOriginSet = metadataQuery.getColumnOrigins(optRelNode, index);
+ if (CollectionUtils.isNotEmpty(relColumnOriginSet)) {
+ for (RelColumnOrigin rco : relColumnOriginSet) {
+ String sourceTable = null;
+ String sourceColumn = null;
+ ColumnLineageDatasetFacetFieldsAdditionalInputFieldsBuilder fieldsBuilder = new ColumnLineageDatasetFacetFieldsAdditionalInputFieldsBuilder();
+ // table
+ RelOptTable table = rco.getOriginTable();
+ if (!(table instanceof LineageRelColumnOrigin.NullRelOptTable)) {
+ sourceTable = String.join(DELIMITER, table.getQualifiedName());
+ // field
+ int ordinal = rco.getOriginColumnOrdinal();
+ List<String> fieldNames = ((TableSourceTable) table).contextResolvedTable().getResolvedSchema().getColumnNames();
+ sourceColumn = fieldNames.get(ordinal);
+ if (!sourceDataset.containsKey(sourceTable)) {
+ sourceDataset.put(sourceTable, table);
+ }
+ } else {
+ fieldsBuilder.namespace(sinkTable.getCatalogName());
+ }
+// LOG.debug("----------------------------------------------------------");
+// LOG.debug("Source table: {}", sourceTable);
+// LOG.debug("Source column: {}", sourceColumn);
+ String transform = LineageRelColumnOrigin.getTransform(rco);
+ if (StringUtils.isNotEmpty(transform)) {
+// LOG.debug("transform: {}", transform);
+ fieldsBuilder.transformations(Collections.singletonList(
+ openLineage.newColumnLineageDatasetFacetFieldsAdditionalInputFieldsTransformationsBuilder()
+ .description(transform)
+ .put("operation", transform)
+ .build()));
+ }
+
+ if (StringUtils.isNotEmpty(sourceColumn)) {
+ fieldsBuilder.namespace(table.getQualifiedName().get(0)).name(sourceTable).field(sourceColumn);
+ additionalInputFields.add(fieldsBuilder.build());
+ }
+
+ LOG.debug("==> source table: {}, column: {}, transform: {}", sourceTable, sourceColumn, transform);
+ }
+ }
+ }
+
+ OutputDataset outputDatasets = openLineage.newOutputDatasetBuilder()
+ .namespace(sinkTable.getCatalogName())
+ .name(sinkSummary) // .name(sinkTable.getDatabaseName() + DELIMITER + sinkTable.getObjectName())
+ .facets(openLineage.newDatasetFacetsBuilder()
+ .columnLineage(openLineage.newColumnLineageDatasetFacetBuilder()
+ .fields(columnLineageFields.build())
+ .build())
+ .dataSource(openLineage.newDatasourceDatasetFacet(
+ sinkTable.getCatalogName() + "." + sinkTable.getDatabaseName(),
+ URI.create("flink://" + sinkTable.getCatalogName() + "/" + sinkTable.getDatabaseName())))
+ .schema(openLineage.newSchemaDatasetFacetBuilder().fields(sinkFields).build())
+ .build())
+ .build();
+
+
+ // 2. Build lineage based on RelMetadataQuery
+ ZonedDateTime now = ZonedDateTime.now(ZoneId.of("UTC"));
+ UUID runId = UUIDUtils.generateNewUUID();
+ JobFacets jobFacets = openLineage.newJobFacetsBuilder()
+ .sql(openLineage.newSQLJobFacet(statement))
+ .documentation(openLineage.newDocumentationJobFacet("flink application."))
+ .build();
+
+ RunEventBuilder runEventBuilder = openLineage.newRunEventBuilder()
+ .eventType(RunEvent.EventType.COMPLETE)
+ .eventTime(now)
+ .run(openLineage.newRunBuilder()
+ .runId(runId)
+ .facets(openLineage.newRunFacetsBuilder()
+ .nominalTime(openLineage.newNominalTimeRunFacet(now, now))
+ .build())
+ .build())
+ .job(openLineage.newJobBuilder()
+ .namespace(sinkTable.getCatalogName())
+ .name(sinkTable.asSummaryString())
+ .facets(jobFacets)
+ .build());
+
+ RunEvent runEvent = runEventBuilder.inputs(sourceDataset.entrySet().stream().map(entry -> {
+ String name = entry.getKey();
+ RelOptTable table = entry.getValue();
+ List<String> qualifiedName = table.getQualifiedName();
+ String namespace = qualifiedName.size() > 2 ? qualifiedName.get(0) : "default_catalog";
+ String db = qualifiedName.size() > 1 ? qualifiedName.get(qualifiedName.size() - 1) : "default_database";
+ return openLineage.newInputDatasetBuilder()
+ .namespace(table.getQualifiedName().get(0))
+ .name(name)
+ .facets(openLineage.newDatasetFacetsBuilder()
+ .dataSource(openLineage.newDatasourceDatasetFacet(
+ namespace + DELIMITER + db,
+ URI.create("flink://" + namespace + "/" + db)))
+ .schema(openLineage.newSchemaDatasetFacetBuilder()
+ .fields(table.getRowType().getFieldList().stream()
+ .map(field -> openLineage.newSchemaDatasetFacetFieldsBuilder()
+ .name(field.getName())
+ .type(field.getType().toString())
+ .description(field.getName())
+ .build())
+ .collect(Collectors.toList()))
+ .build())
+ .build())
+ .build();
+ }).collect(Collectors.toList())).outputs(Collections.singletonList(outputDatasets)).build();
+
+// String event = OpenLineageClientUtils.toJson(runEvent);
+// LOG.info("event: {}", event);
+ LOG.info("==> openLineage emit...");
+ openLineageClient.emit(runEvent);
+ }
+
+
+ private static void validateSchema(String sinkTable, RelNode relNode, List<Column> sinkFieldList) {
+ List<String> queryFieldList = relNode.getRowType().getFieldNames();
+ if (queryFieldList.size() != sinkFieldList.size()) {
+ throw new ValidationException(String.format(
+ "Column types of query result and sink for %s do not match.\nQuery schema: %s\nSink schema: %s",
+ sinkTable, queryFieldList, sinkFieldList));
+ }
+ }
+}
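For readers unfamiliar with the OpenLineage client, a stripped-down sketch of the same emit path (names are illustrative; Clients.newClient() resolves its transport from the standard client configuration, e.g. an openlineage.yml or environment variables, so that part is environment-dependent):

    import io.openlineage.client.Clients;
    import io.openlineage.client.OpenLineage;

    import java.net.URI;
    import java.time.ZoneId;
    import java.time.ZonedDateTime;
    import java.util.Collections;
    import java.util.UUID;

    public class EmitDemo {
        public static void main(String[] args) {
            OpenLineage ol = new OpenLineage(URI.create("https://github.com/apache/flink"));
            ZonedDateTime now = ZonedDateTime.now(ZoneId.of("UTC"));
            OpenLineage.RunEvent event = ol.newRunEventBuilder()
                    .eventType(OpenLineage.RunEvent.EventType.COMPLETE)
                    .eventTime(now)
                    .run(ol.newRunBuilder().runId(UUID.randomUUID()).build())
                    .job(ol.newJobBuilder().namespace("default_catalog").name("demo_job").build())
                    .inputs(Collections.emptyList())
                    .outputs(Collections.emptyList())
                    .build();
            Clients.newClient().emit(event); // transport comes from the client config
        }
    }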
diff --git a/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/LineageRelColumnOrigin.java b/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/LineageRelColumnOrigin.java
new file mode 100644
index 0000000000..4b334b0053
--- /dev/null
+++ b/dinky-admin/src/main/java/org/apache/lineage/flink/sql/metadata/LineageRelColumnOrigin.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lineage.flink.sql.metadata;
+
+import org.apache.calcite.linq4j.tree.Expression;
+import org.apache.calcite.plan.RelOptSchema;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.RelCollation;
+import org.apache.calcite.rel.RelDistribution;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.RelReferentialConstraint;
+import org.apache.calcite.rel.metadata.RelColumnOrigin;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.calcite.schema.ColumnStrategy;
+import org.apache.calcite.util.ImmutableBitSet;
+
+import java.util.Collections;
+import java.util.List;
+
+public class LineageRelColumnOrigin extends RelColumnOrigin {
+ /**
+ * Stores the data-conversion expression: which expression transforms the
+ * source table fields into the target field.
+ */
+ private String transform;
+
+ // ~ Constructors -----------------------------------------------------------
+
+ public LineageRelColumnOrigin(
+ RelOptTable originTable,
+ int iOriginColumn,
+ boolean isDerived) {
+ super(originTable, iOriginColumn, isDerived);
+ }
+
+ public LineageRelColumnOrigin(
+ RelOptTable originTable,
+ int iOriginColumn,
+ boolean isDerived,
+ String transform) {
+ super(originTable, iOriginColumn, isDerived);
+ this.transform = transform;
+ }
+
+ public LineageRelColumnOrigin(RelColumnOrigin relColumnOrigin) {
+ super(relColumnOrigin.getOriginTable(), relColumnOrigin.getOriginColumnOrdinal(), relColumnOrigin.isDerived());
+ }
+
+ public LineageRelColumnOrigin(
+ RelColumnOrigin relColumnOrigin,
+ String transform) {
+ this(relColumnOrigin);
+ this.transform = transform;
+ }
+
+ public String getTransform() {
+ return transform;
+ }
+
+ public static String getTransform(RelColumnOrigin relColumnOrigin) {
+ return relColumnOrigin instanceof LineageRelColumnOrigin
+ ? ((LineageRelColumnOrigin) relColumnOrigin).getTransform()
+ : null;
+ }
+
+ public static final NullRelOptTable NULL_REL_OPT_TABLE = new NullRelOptTable();
+
+ public static class NullRelOptTable implements RelOptTable {
+ @Override
+ public List<String> getQualifiedName() {
+ return Collections.emptyList();
+ }
+
+ @Override
+ public double getRowCount() {
+ return 0;
+ }
+
+ @Override
+ public RelDataType getRowType() {
+ return null;
+ }
+
+ @Override
+ public RelOptSchema getRelOptSchema() {
+ return null;
+ }
+
+ @Override
+ public RelNode toRel(ToRelContext toRelContext) {
+ return null;
+ }
+
+ @Override
+ public List<RelCollation> getCollationList() {
+ return null;
+ }
+
+ @Override
+ public RelDistribution getDistribution() {
+ return null;
+ }
+
+ @Override
+ public boolean isKey(ImmutableBitSet immutableBitSet) {
+ return false;
+ }
+
+ @Override
+ public List<ImmutableBitSet> getKeys() {
+ return null;
+ }
+
+ @Override
+ public List<RelReferentialConstraint> getReferentialConstraints() {
+ return null;
+ }
+
+ @Override
+ public Expression getExpression(Class aClass) {
+ return null;
+ }
+
+ @Override
+ public RelOptTable extend(List<RelDataTypeField> list) {
+ return null;
+ }
+
+ @Override
+ public List getColumnStrategies() {
+ return null;
+ }
+
+ @Override
+ public <C> C unwrap(Class<C> aClass) {
+ return null;
+ }
+ }
+}
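NullRelOptTable is a null object: columns derived purely from expressions have no physical origin table, so lineage code can branch on the marker instead of handling nulls. A hedged sketch (the transform string is illustrative):

    import org.apache.calcite.rel.metadata.RelColumnOrigin;
    import org.apache.lineage.flink.sql.metadata.LineageRelColumnOrigin;

    public class NullOriginDemo {
        public static void main(String[] args) {
            RelColumnOrigin origin = new LineageRelColumnOrigin(
                    LineageRelColumnOrigin.NULL_REL_OPT_TABLE, 0, true, "CONCAT('v', CAST(1 AS STRING))");

            // false: no physical source table, only the transform expression survives
            System.out.println(!(origin.getOriginTable() instanceof LineageRelColumnOrigin.NullRelOptTable));
            System.out.println(LineageRelColumnOrigin.getTransform(origin));
        }
    }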
diff --git a/dinky-admin/src/main/java/org/dinky/Dinky.java b/dinky-admin/src/main/java/org/dinky/Dinky.java
index 4f717bb4a5..9039a8514c 100644
--- a/dinky-admin/src/main/java/org/dinky/Dinky.java
+++ b/dinky-admin/src/main/java/org/dinky/Dinky.java
@@ -22,9 +22,11 @@
import org.dinky.data.constant.DirConstant;
import org.dinky.security.NoExitSecurityManager;
+import org.dinky.ltpa.config.LtpaTokenProperties;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.freemarker.FreeMarkerAutoConfiguration;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.transaction.annotation.EnableTransactionManagement;
@@ -41,6 +43,7 @@
@EnableTransactionManagement
@SpringBootApplication(exclude = FreeMarkerAutoConfiguration.class)
@EnableCaching
+@EnableConfigurationProperties(LtpaTokenProperties.class)
public class Dinky {
static {
diff --git a/dinky-admin/src/main/java/org/dinky/configure/AppConfig.java b/dinky-admin/src/main/java/org/dinky/configure/AppConfig.java
index e444a0d2eb..70d8fded70 100644
--- a/dinky-admin/src/main/java/org/dinky/configure/AppConfig.java
+++ b/dinky-admin/src/main/java/org/dinky/configure/AppConfig.java
@@ -19,12 +19,14 @@
package org.dinky.configure;
+import cn.dev33.satoken.exception.StopMatchException;
+import cn.dev33.satoken.interceptor.SaInterceptor;
+import cn.dev33.satoken.router.SaRouter;
+import cn.dev33.satoken.stp.StpUtil;
import org.dinky.data.constant.BaseConstant;
import org.dinky.interceptor.LocaleChangeInterceptor;
import org.dinky.interceptor.TenantInterceptor;
-
-import java.util.Locale;
-
+import org.dinky.ltpa.interceptor.LtpaTokenInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.LocaleResolver;
@@ -32,10 +34,8 @@
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import org.springframework.web.servlet.i18n.CookieLocaleResolver;
-import cn.dev33.satoken.exception.StopMatchException;
-import cn.dev33.satoken.interceptor.SaInterceptor;
-import cn.dev33.satoken.router.SaRouter;
-import cn.dev33.satoken.stp.StpUtil;
+import javax.annotation.Resource;
+import java.util.Locale;
/**
* AppConfiguration
@@ -44,6 +44,10 @@
*/
@Configuration
public class AppConfig implements WebMvcConfigurer {
+ @Resource
+ private LtpaTokenInterceptor ltpaTokenInterceptor;
+
+
/**
* Cookie
*
@@ -73,6 +77,13 @@ public LocaleChangeInterceptor localeChangeInterceptor() {
@Override
public void addInterceptors(InterceptorRegistry registry) {
registry.addInterceptor(localeChangeInterceptor());
+ // Register the LTPA token interceptor
+ registry.addInterceptor(ltpaTokenInterceptor)
+// .addPathPatterns("/**")
+ .addPathPatterns("/api/**", "/openapi/**")
+ .excludePathPatterns("/api/login", "/api/ldap/ldapEnableStatus", "/download/**", "/druid/**");
+
// Register the Sa-Token route interceptor
registry.addInterceptor(new SaInterceptor(handler -> {
SaRouter.match("/openapi/**", r -> {
diff --git a/dinky-admin/src/main/java/org/dinky/context/ConsoleContextHolder.java b/dinky-admin/src/main/java/org/dinky/context/ConsoleContextHolder.java
index 05939563ba..e0b5c4b9cb 100644
--- a/dinky-admin/src/main/java/org/dinky/context/ConsoleContextHolder.java
+++ b/dinky-admin/src/main/java/org/dinky/context/ConsoleContextHolder.java
@@ -21,6 +21,7 @@
import static org.dinky.ws.GlobalWebSocket.sendTopic;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.dinky.aop.ProcessAspect;
import org.dinky.data.constant.DirConstant;
import org.dinky.data.enums.ProcessStatus;
@@ -233,6 +234,13 @@ public ProcessStepEntity registerProcessStep(ProcessStepType type, String proces
public synchronized void finishedProcess(String processName, ProcessStatus status, Throwable e) {
ProcessEntity process = logPross.get(processName);
try {
+ if (process == null) {
+ if (e != null) {
+ appendLog(processName, null, LogUtil.getError(e.getCause()), true);
+ log.error("[{}] process is null, finishedProcess error: {}", processName, ExceptionUtils.getStackTrace(e));
+ }
+ return;
+ }
process.setStatus(status);
process.setEndTime(LocalDateTime.now());
process.setTime(Duration.between(process.getStartTime(), process.getEndTime())
diff --git a/dinky-admin/src/main/java/org/dinky/controller/APIController.java b/dinky-admin/src/main/java/org/dinky/controller/APIController.java
index 6b0ac4c0fc..7510a775f3 100644
--- a/dinky-admin/src/main/java/org/dinky/controller/APIController.java
+++ b/dinky-admin/src/main/java/org/dinky/controller/APIController.java
@@ -19,6 +19,7 @@
package org.dinky.controller;
+import org.apache.commons.beanutils.BeanUtils;
import org.dinky.DinkyVersion;
import org.dinky.data.annotations.Log;
import org.dinky.data.dto.APISavePointTaskDTO;
@@ -39,13 +40,9 @@
import org.dinky.service.TaskService;
import java.util.List;
+import java.util.Map;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.bind.annotation.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -76,10 +73,11 @@ public Result getVersionInfo() {
return Result.succeed(DinkyVersion.getVersion(), Status.QUERY_SUCCESS);
}
- @PostMapping("/submitTask")
+ @RequestMapping(value = "/submitTask", method = {RequestMethod.POST, RequestMethod.GET})
@ApiOperation("Submit Task")
// @Log(title = "Submit Task", businessType = BusinessType.SUBMIT)
- public Result<JobResult> submitTask(@RequestBody TaskSubmitDto submitDto) throws Exception {
+ public Result<JobResult> submitTask(
+ @RequestBody(required = false) TaskSubmitDto submitDto,
+ @RequestParam(required = false) Map<String, String> params) throws Exception {
+ if (submitDto == null) {
+ submitDto = new TaskSubmitDto();
+ }
+ BeanUtils.populate(submitDto, params);
taskService.initTenantByTaskId(submitDto.getId());
JobResult jobResult = taskService.submitTask(submitDto);
if (jobResult.isSuccess()) {
diff --git a/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java b/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java
index c822e03cfd..a3ea358cca 100644
--- a/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java
+++ b/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java
@@ -127,12 +127,12 @@ public Result upload(MultipartFile file, @PathVariable Integer id) {
required = true,
dataType = "Catalogue",
dataTypeClass = Catalogue.class)
- public Result<Void> saveOrUpdateCatalogue(@RequestBody Catalogue catalogue) {
+ public Result<Catalogue> saveOrUpdateCatalogue(@RequestBody Catalogue catalogue) {
if (catalogueService.checkNameIsExistByParentId(catalogue)) {
return Result.failed(Status.NAME_IS_EXIST);
}
if (catalogueService.saveOrUpdateOrRename(catalogue)) {
- return Result.succeed(Status.SAVE_SUCCESS);
+ return Result.succeed(catalogue, Status.SAVE_SUCCESS);
} else {
return Result.failed(Status.SAVE_FAILED);
}
diff --git a/dinky-admin/src/main/java/org/dinky/controller/TaskController.java b/dinky-admin/src/main/java/org/dinky/controller/TaskController.java
index 1859cb1a8e..aa308c7a0b 100644
--- a/dinky-admin/src/main/java/org/dinky/controller/TaskController.java
+++ b/dinky-admin/src/main/java/org/dinky/controller/TaskController.java
@@ -19,6 +19,7 @@
package org.dinky.controller;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.dinky.config.Dialect;
import org.dinky.data.annotations.CheckTaskApproval;
import org.dinky.data.annotations.CheckTaskOwner;
@@ -34,6 +35,7 @@
import org.dinky.data.enums.JobLifeCycle;
import org.dinky.data.enums.ProcessType;
import org.dinky.data.enums.Status;
+import org.dinky.data.enums.CodeEnum;
import org.dinky.data.exception.NotSupportExplainExcepition;
import org.dinky.data.exception.SqlExplainExcepition;
import org.dinky.data.model.JarSubmitParam;
@@ -178,11 +180,18 @@ public Result savepoint(@TaskId @RequestParam Integer taskId, @
@CheckTaskOwner(checkParam = TaskId.class, checkInterface = TaskService.class)
public Result<Boolean> changeTaskLife(@TaskId @RequestParam Integer taskId, @RequestParam Integer lifeCycle)
throws SqlExplainExcepition {
- if (taskService.changeTaskLifeRecyle(taskId, JobLifeCycle.get(lifeCycle))) {
- return Result.succeed(lifeCycle == 2 ? Status.PUBLISH_SUCCESS : Status.OFFLINE_SUCCESS);
- } else {
- return Result.failed(lifeCycle == 2 ? Status.PUBLISH_FAILED : Status.OFFLINE_FAILED);
+ // Return the UDF compilation error message so the frontend can troubleshoot
+ String msg = "";
+ try {
+ if (taskService.changeTaskLifeRecyle(taskId, JobLifeCycle.get(lifeCycle))) {
+ return Result.succeed(true, lifeCycle == 2 ? Status.PUBLISH_SUCCESS : Status.OFFLINE_SUCCESS);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ msg = ExceptionUtils.getStackTrace(e);
}
+ return Result.of(false, CodeEnum.ERROR.getCode(),
+ String.format("%s.\n%s", (lifeCycle == 2 ? Status.PUBLISH_FAILED : Status.OFFLINE_FAILED).getMessage(), msg));
}
@PostMapping("/explainSql")
diff --git a/dinky-admin/src/main/java/org/dinky/init/SystemInit.java b/dinky-admin/src/main/java/org/dinky/init/SystemInit.java
index a2c04c7840..2236f3c21e 100644
--- a/dinky-admin/src/main/java/org/dinky/init/SystemInit.java
+++ b/dinky-admin/src/main/java/org/dinky/init/SystemInit.java
@@ -217,7 +217,8 @@ public static Project getProject() {
public void registerUDF() {
List allUDF = taskService.getReleaseUDF();
if (CollUtil.isNotEmpty(allUDF)) {
- UdfCodePool.registerPool(allUDF.stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()));
+ // On startup, only log errors
+ UdfCodePool.registerPool(allUDF.stream().map(UDFUtils::taskToUDFSilent).collect(Collectors.toList()));
}
UdfCodePool.updateGitPool(gitProjectService.getGitPool());
}
diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/ClearJobHistoryHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/ClearJobHistoryHandler.java
index 107e57dbc6..e7ccef8b34 100644
--- a/dinky-admin/src/main/java/org/dinky/job/handler/ClearJobHistoryHandler.java
+++ b/dinky-admin/src/main/java/org/dinky/job/handler/ClearJobHistoryHandler.java
@@ -19,6 +19,11 @@
package org.dinky.job.handler;
+import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.baomidou.mybatisplus.core.toolkit.support.SFunction;
+import com.baomidou.mybatisplus.extension.conditions.query.LambdaQueryChainWrapper;
+import lombok.Builder;
import org.dinky.assertion.Asserts;
import org.dinky.data.model.ClusterInstance;
import org.dinky.data.model.job.History;
@@ -32,10 +37,6 @@
import java.util.List;
import java.util.stream.Collectors;
-import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
-
-import lombok.Builder;
-
@Builder
public class ClearJobHistoryHandler {
private JobInstanceService jobInstanceService;
@@ -45,7 +46,8 @@ public class ClearJobHistoryHandler {
/**
* Clears job history records based on the specified criteria.
- * @param maxRetainDays The maximum number of days to retain job history.
+ *
+ * @param maxRetainDays The maximum number of days to retain job history.
* @param maxRetainCount The maximum count to retain job history.
*/
public void clearJobHistory(Integer maxRetainDays, Integer maxRetainCount) {
@@ -107,7 +109,8 @@ public void clearJobHistory(Integer maxRetainDays, Integer maxRetainCount) {
/**
* Clears dinky history records based on the specified criteria.
- * @param maxRetainDays The maximum number of days to retain dinky history.
+ *
+ * @param maxRetainDays The maximum number of days to retain dinky history.
* @param maxRetainCount The maximum count to retain dinky history.
*/
public void clearDinkyHistory(Integer maxRetainDays, Integer maxRetainCount) {
@@ -122,17 +125,14 @@ public void clearDinkyHistory(Integer maxRetainDays, Integer maxRetainCount) {
for (History history : historyList) {
// Check if the count exceeds the maximum retain count
if (history.getCount() > maxRetainCount) {
- List reservedHistory = historyService
- .lambdaQuery()
- .eq(History::getTaskId, history.getTaskId())
+ List<History> reservedHistory = eq(historyService.lambdaQuery(), History::getTaskId, history.getTaskId())
.orderByDesc(History::getId)
.last("limit " + maxRetainCount)
.list();
// Create a query wrapper to delete history records older than the maximum retain days
QueryWrapper<History> deleteWrapper = new QueryWrapper<>();
- deleteWrapper
- .lambda()
- .eq(History::getTaskId, history.getTaskId())
+ eq(deleteWrapper.lambda(), History::getTaskId, history.getTaskId())
.lt(History::getStartTime, LocalDateTime.now().minusDays(maxRetainDays))
.notIn(
true,
@@ -142,4 +142,12 @@ public void clearDinkyHistory(Integer maxRetainDays, Integer maxRetainCount) {
}
}
}
+
+ public static <T> LambdaQueryWrapper<T> eq(LambdaQueryWrapper<T> wrapper, SFunction<T, ?> column, Object val) {
+ return val == null ? wrapper.isNull(column) : wrapper.eq(true, column, val);
+ }
+
+ public static <T> LambdaQueryChainWrapper<T> eq(LambdaQueryChainWrapper<T> wrapper, SFunction<T, ?> column, Object val) {
+ return val == null ? wrapper.isNull(column) : wrapper.eq(true, column, val);
+ }
}
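The new eq overloads exist because a plain wrapper.eq(column, null) renders "column = NULL", which matches no rows in SQL; routing null to isNull(...) yields "column IS NULL" instead. A minimal sketch of the difference (assumes the MyBatis-Plus and History types above):

    import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;

    import org.dinky.data.model.job.History;
    import org.dinky.job.handler.ClearJobHistoryHandler;

    public class NullSafeEqDemo {
        public static void main(String[] args) {
            LambdaQueryWrapper<History> byId = ClearJobHistoryHandler.eq(
                    new LambdaQueryWrapper<>(), History::getTaskId, 7);    // renders WHERE task_id = 7
            LambdaQueryWrapper<History> byNull = ClearJobHistoryHandler.eq(
                    new LambdaQueryWrapper<>(), History::getTaskId, null); // renders WHERE task_id IS NULL
        }
    }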
diff --git a/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaSetting.java b/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaSetting.java
new file mode 100644
index 0000000000..651ea0d239
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaSetting.java
@@ -0,0 +1,45 @@
+package org.dinky.ltpa.config;
+
+/**
+ * Author: lwjhn
+ * Date: 2023/7/3 15:08
+ * Description:
+ */
+public class LtpaSetting {
+ protected String secret = "2OqzZkZ//RvOLF+X1HqNJWcCBHE="; // 秘钥
+ protected long expiration = 43200L; // 默认过期-LTPA_Validity时间为43200秒(12小时)
+ protected long transition = 300L; // 过渡时间-LTPA_TokenExpiration.保证各服务时间不同步时的误差,过期后的这个时间内仍然有效,默认300秒(5分钟)
+// protected String version="00010203"; //LtpaToken 版本(长度4),Domino的固定为[0x00][0x01][0x02][0x03]
+
+ public String getSecret() {
+ return secret;
+ }
+
+ public void setSecret(String secret) {
+ this.secret = secret;
+ }
+
+ public long getExpiration() {
+ return expiration;
+ }
+
+ public void setExpiration(long expiration) {
+ this.expiration = expiration;
+ }
+
+ public long getTransition() {
+ return transition;
+ }
+
+ public void setTransition(long transition) {
+ this.transition = transition;
+ }
+
+// public String getVersion() {
+// return version;
+// }
+//
+// public void setVersion(String version) {
+// this.version = version;
+// }
+}
diff --git a/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaTokenProperties.java b/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaTokenProperties.java
new file mode 100644
index 0000000000..b453c3b1f9
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaTokenProperties.java
@@ -0,0 +1,15 @@
+package org.dinky.ltpa.config;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+import java.util.HashMap;
+
+/**
+ * Author: lwjhn
+ * Date: 2023/6/30 11:21
+ * Description:
+ */
+@ConfigurationProperties(prefix = "ltpa", ignoreInvalidFields = true)
+public class LtpaTokenProperties extends HashMap<String, LtpaTokenSetting> {
+
+}
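Binding a @ConfigurationProperties class that extends a map means every ltpa.<name>.* property group becomes one LtpaTokenSetting, keyed by <name>, which doubles as the cookie/header name the handler checks. A hedged programmatic equivalent (property values are illustrative):

    import org.dinky.ltpa.config.LtpaTokenProperties;
    import org.dinky.ltpa.config.LtpaTokenSetting;

    public class LtpaBindingDemo {
        public static void main(String[] args) {
            // Equivalent of, e.g.:
            //   ltpa.LtpaToken.secret=2OqzZkZ//RvOLF+X1HqNJWcCBHE=
            //   ltpa.LtpaToken.expiration=43200
            //   ltpa.LtpaToken.domain=example.com
            LtpaTokenProperties props = new LtpaTokenProperties();
            LtpaTokenSetting setting = new LtpaTokenSetting();
            setting.setSecret("2OqzZkZ//RvOLF+X1HqNJWcCBHE=");
            setting.setExpiration(43200L);
            setting.setDomain("example.com");
            props.put("LtpaToken", setting); // key = cookie/header name looked up by LtpaTokenHandler
        }
    }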
diff --git a/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaTokenSetting.java b/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaTokenSetting.java
new file mode 100644
index 0000000000..dad0e74faf
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/ltpa/config/LtpaTokenSetting.java
@@ -0,0 +1,70 @@
+package org.dinky.ltpa.config;
+
+import java.util.List;
+import java.util.regex.Pattern;
+
+/**
+ * Author: lwjhn
+ * Date: 2023/6/30 14:23
+ * Description:
+ */
+public class LtpaTokenSetting extends LtpaSetting {
+ private Pattern originRegExp;
+
+ private String originRegVal;
+
+ private Pattern targetRegExp;
+
+ private String targetRegVal;
+ private String domain;
+
+ private List clearCookies;
+
+ public Pattern getOriginRegExp() {
+ return originRegExp;
+ }
+
+ public void setOriginRegExp(Pattern originRegExp) {
+ this.originRegExp = originRegExp;
+ }
+
+ public String getOriginRegVal() {
+ return originRegVal;
+ }
+
+ public void setOriginRegVal(String originRegVal) {
+ this.originRegVal = originRegVal;
+ }
+
+ public Pattern getTargetRegExp() {
+ return targetRegExp;
+ }
+
+ public void setTargetRegExp(Pattern targetRegExp) {
+ this.targetRegExp = targetRegExp;
+ }
+
+ public String getTargetRegVal() {
+ return targetRegVal;
+ }
+
+ public void setTargetRegVal(String targetRegVal) {
+ this.targetRegVal = targetRegVal;
+ }
+
+ public String getDomain() {
+ return domain;
+ }
+
+ public void setDomain(String domain) {
+ this.domain = domain;
+ }
+
+ public List getClearCookies() {
+ return clearCookies;
+ }
+
+ public void setClearCookies(List clearCookies) {
+ this.clearCookies = clearCookies;
+ }
+}
diff --git a/dinky-admin/src/main/java/org/dinky/ltpa/handler/LtpaTokenHandler.java b/dinky-admin/src/main/java/org/dinky/ltpa/handler/LtpaTokenHandler.java
new file mode 100644
index 0000000000..bfb61c8cf9
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/ltpa/handler/LtpaTokenHandler.java
@@ -0,0 +1,137 @@
+package org.dinky.ltpa.handler;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.dinky.ltpa.config.LtpaTokenProperties;
+import org.dinky.ltpa.config.LtpaTokenSetting;
+import org.dinky.ltpa.token.LtpaToken;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+import java.util.function.BiFunction;
+import java.util.stream.Collectors;
+
+/**
+ * Author: lwjhn
+ * Date: 2023/6/30 17:49
+ * Description:
+ */
+@Component
+@Slf4j
+public class LtpaTokenHandler {
+ @Resource
+ LtpaTokenProperties ltpaTokenProperties;
+
+ public boolean isLtpaTokenCall(HttpServletRequest request) {
+ return iterator(request, (token, entry) -> true, false);
+ }
+
+ public LtpaToken validateLtpaToken(HttpServletRequest request) {
+ return iterator(request, (token, entry) -> {
+ LtpaToken ltpaToken = new LtpaToken(token);
+ ltpaToken.validate(entry.getValue());
+ return ltpaToken;
+ }, null);
+ }
+
+
+/* public LtpaAuthenticationToken buildToken(HttpServletRequest request) {
+ return iterator(request, (token, entry) -> new LtpaAuthenticationToken(new LtpaToken(token), entry));
+ }*/
+
+/* public static Long getUser(LtpaAuthenticationToken authenticationToken) {
+ LtpaToken ltpaToken = authenticationToken.getLtpaToken();
+ Map.Entry credentials = authenticationToken.getSetting();
+ LtpaTokenSetting setting = credentials.getValue();
+ try {
+ ltpaToken.validate(setting);
+ } catch (Exception e) {
+ throw new RuntimeException(e.getMessage() + " ( " + credentials.getKey() + " )");
+ }
+ return Long.parseLong(setting.getTargetRegExp() == null ? ltpaToken.getCanonicalUser()
+ : setting.getTargetRegExp().matcher(ltpaToken.getCanonicalUser()).replaceAll(setting.getTargetRegVal()));
+ }*/
+
+/* public Long getUser(HttpServletRequest request) {
+ LtpaAuthenticationToken ltpaAuthenticationToken = buildToken(request);
+ return ltpaAuthenticationToken == null ? null : getUser(ltpaAuthenticationToken);
+ }*/
+
+ public LtpaToken generate(long user) {
+ LtpaTokenSetting setting;
+ for (Map.Entry<String, LtpaTokenSetting> entry : ltpaTokenProperties.entrySet()) {
+ if ((setting = entry.getValue()) != null) {
+ return LtpaToken.generate(String.valueOf(user), setting);
+ }
+ }
+ return null;
+ }
+
+ public final static String LTPA_COOKIE_HEADER = "ltpa-cookie-";
+ public final static String ACCESS_HEADER = "Access-Control-Expose-Headers";
+
+/* public String setCookie(long user) {
+ LtpaAuthenticationToken authenticationToken = generate(user);
+ if (authenticationToken == null || authenticationToken.getLtpaToken() == null) {
+ return null;
+ }
+ String domain = authenticationToken.getSetting().getValue().getDomain();
+ LtpaToken token = authenticationToken.getLtpaToken();
+ HttpServletResponse httpServletResponse = ServletUtils.response();
+ httpServletResponse.addHeader(ACCESS_HEADER, LTPA_COOKIE_HEADER + authenticationToken.getSetting().getKey());
+ httpServletResponse.setHeader(LTPA_COOKIE_HEADER + authenticationToken.getSetting().getKey(),
+ domain = authenticationToken.getSetting().getKey() + "=" + token.getLtpaToken() +
+ "; Expires=" + token.getExpiresDate() + "; Path=/" +
+ (StringUtils.isBlank(domain) ? "" : ("; Domain=" + domain)));
+ return domain;
+ }*/
+
+ /*public void removeCookie() {
+ HttpServletResponse httpServletResponse = ServletUtils.response();
+ List clearList;
+ String value;
+ for (Map.Entry entry : ltpaTokenProperties.entrySet()) {
+ if (entry.getValue() != null) {
+ httpServletResponse.addHeader(ACCESS_HEADER, LTPA_COOKIE_HEADER + entry.getKey());
+ httpServletResponse.setHeader(LTPA_COOKIE_HEADER + entry.getKey(),
+ entry.getKey() + (
+ value = "=" + "; Expires=" + new Date() + "; Path=/" +
+ (StringUtils.isBlank(entry.getValue().getDomain()) ? "" : ("; Domain=" + entry.getValue().getDomain()))
+ ));
+ if ((clearList = entry.getValue().getClearCookies()) != null) {
+ for (String key : clearList) {
+ httpServletResponse.addHeader(ACCESS_HEADER, LTPA_COOKIE_HEADER + key);
+ httpServletResponse.setHeader(LTPA_COOKIE_HEADER + key, key + value);
+ }
+ }
+
+ }
+ }
+ }*/
+
+ public <T> T iterator(HttpServletRequest request, BiFunction<String, Map.Entry<String, LtpaTokenSetting>, T> caller) {
+ return iterator(request, caller, null);
+ }
+
+ @SuppressWarnings("unchecked")
+ public <T> T iterator(HttpServletRequest request, BiFunction<String, Map.Entry<String, LtpaTokenSetting>, T> caller, T defaultValue) {
+ if (request == null || ltpaTokenProperties == null) {
+ return defaultValue;
+ }
+ Map<String, Cookie> cookies = request.getCookies() == null ? Collections.EMPTY_MAP
+ : Arrays.stream(request.getCookies()).collect(Collectors.toMap(Cookie::getName, cookie -> cookie));
+ for (Map.Entry<String, LtpaTokenSetting> entry : ltpaTokenProperties.entrySet()) {
+ Cookie cookie = cookies.get(entry.getKey());
+ String token = cookie == null ? null : cookie.getValue();
+ if (entry.getValue() != null && (StringUtils.isNotBlank(token) || StringUtils.isNotBlank(token = request.getHeader(entry.getKey())))) {
+ return caller.apply(token, entry);
+ }
+ }
+ return defaultValue;
+ }
+}
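The iterator walks the configured entries in order; for each one it looks for a cookie named after the entry key, then falls back to a request header of the same name. A hedged call-site sketch:

    import javax.servlet.http.HttpServletRequest;

    import org.dinky.ltpa.handler.LtpaTokenHandler;
    import org.dinky.ltpa.token.LtpaToken;

    public class HandlerDemo {
        // Returns null when no configured cookie/header is present; validateLtpaToken
        // throws a RuntimeException on a bad digest or an expired time window.
        static LtpaToken resolve(LtpaTokenHandler handler, HttpServletRequest request) {
            return handler.isLtpaTokenCall(request) ? handler.validateLtpaToken(request) : null;
        }
    }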
diff --git a/dinky-admin/src/main/java/org/dinky/ltpa/interceptor/LtpaTokenInterceptor.java b/dinky-admin/src/main/java/org/dinky/ltpa/interceptor/LtpaTokenInterceptor.java
new file mode 100644
index 0000000000..701c8322bf
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/ltpa/interceptor/LtpaTokenInterceptor.java
@@ -0,0 +1,76 @@
+package org.dinky.ltpa.interceptor;
+
+import cn.dev33.satoken.stp.StpUtil;
+import lombok.extern.slf4j.Slf4j;
+import org.dinky.assertion.Asserts;
+import org.dinky.context.TenantContextHolder;
+import org.dinky.context.UserInfoContextHolder;
+import org.dinky.data.dto.UserDTO;
+import org.dinky.data.enums.Status;
+import org.dinky.data.exception.AuthException;
+import org.dinky.data.model.rbac.User;
+import org.dinky.data.result.Result;
+import org.dinky.ltpa.handler.LtpaTokenHandler;
+import org.dinky.ltpa.token.LtpaToken;
+import org.dinky.service.UserService;
+import org.springframework.stereotype.Component;
+import org.springframework.web.servlet.HandlerInterceptor;
+
+import javax.annotation.Resource;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * Author: lwjhn
+ * Date: 2024/7/8 15:46
+ * Description:
+ */
+@Component
+@Slf4j
+public class LtpaTokenInterceptor implements HandlerInterceptor {
+ @Resource
+ private LtpaTokenHandler ltpaTokenHandler;
+ @Resource
+ UserService userService;
+
+ @Override
+ public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
+ try {
+ if (StpUtil.isLogin()) {
+ return true;
+ }
+ LtpaToken token = ltpaTokenHandler.validateLtpaToken(request);
+ if (token != null) {
+ String[] info = token.getUser().split("\\W+");
+ // 0: userId
+ String user = info[0];
+ Integer userId = Integer.valueOf(user);
+ if (UserInfoContextHolder.get(userId) != null) {
+ StpUtil.login(userId, true);
+ } else {
+ User userInfo = userService.getById(user);
+ if (Asserts.isNull(userInfo)) {
+ throw new AuthException(Status.USER_NOT_EXIST, user);
+ }
+ Result<UserDTO> result = userService.loginUser(userInfo, true);
+ if (Status.LOGIN_SUCCESS.getCode() != result.getCode()) {
+ throw new AuthException(Status.findStatusByCode(result.getCode()).orElse(Status.LOGIN_FAILURE), result.getMsg());
+ }
+ }
+ // 1: tenantId
+ if (info.length > 1) {
+ int finalTenantId = Integer.parseInt(info[1]);
+ TenantContextHolder.set(finalTenantId);
+ Cookie cookie = new Cookie("tenantId", info[1]);
+ cookie.setPath("/");
+ cookie.setMaxAge(Integer.MAX_VALUE);
+ response.addCookie(cookie);
+ }
+ }
+ } catch (Exception e) {
+ log.warn("LTPA token pre-handle failed: {}", e.getMessage());
+ }
+ return true;
+ }
+}
diff --git a/dinky-admin/src/main/java/org/dinky/ltpa/token/LtpaToken.java b/dinky-admin/src/main/java/org/dinky/ltpa/token/LtpaToken.java
new file mode 100644
index 0000000000..3aab41d5ae
--- /dev/null
+++ b/dinky-admin/src/main/java/org/dinky/ltpa/token/LtpaToken.java
@@ -0,0 +1,179 @@
+package org.dinky.ltpa.token;
+
+import org.dinky.ltpa.config.LtpaSetting;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Calendar;
+import java.util.Date;
+
+public final class LtpaToken {
+ private static final java.util.regex.Pattern pattern = java.util.regex.Pattern.compile("((?<==)|^)[^/=]*((?=/)|$)");
+ private byte[] header = new byte[4];
+ private byte[] creation = new byte[8];
+ private byte[] expires = new byte[8];
+ private byte[] user;
+ private String canonicalUser;
+ private byte[] digest = new byte[20];
+ private Date creationDate, expiresDate;
+ private String ltpaToken;
+ private byte[] rawToken;
+
+ public LtpaToken(String token) {
+ ltpaToken = token;
+ rawToken = base64decode(token);
+ user = new byte[(rawToken.length) - 40];
+ System.arraycopy(rawToken, 0, header, 0, header.length); //4
+ System.arraycopy(rawToken, 4, creation, 0, creation.length); //8
+ System.arraycopy(rawToken, 12, expires, 0, expires.length); //8
+ System.arraycopy(rawToken, 20, user, 0, user.length);
+ System.arraycopy(rawToken, rawToken.length - digest.length, digest, 0, digest.length); //20
+
+ creationDate = new Date(Long.parseLong(new String(creation), 16) * 1000);
+ expiresDate = new Date(Long.parseLong(new String(expires), 16) * 1000);
+ canonicalUser = new String(user);
+ }
+
+ private LtpaToken() {
+
+ }
+
+ public static String getCommonUser(String name) {
+ java.util.regex.Matcher matcher = pattern.matcher(name);
+ return matcher.find() ? matcher.group(0) : name;
+ }
+
+ public static boolean isValid(String ltpaToken, String secret) throws NoSuchAlgorithmException {
+ LtpaToken ltpa = new LtpaToken(ltpaToken);
+ return ltpa.isValid(secret);
+ }
+
+ public static LtpaToken generate(String canonicalUser, Date tokenCreation, Date tokenExpires, String secret) {
+ LtpaToken ltpa = new LtpaToken();
+ Calendar calendar = Calendar.getInstance();
+ ltpa.header = new byte[]{0, 1, 2, 3};
+
+ calendar.setTime(tokenCreation);
+ ltpa.creation = Long.toHexString(calendar.getTimeInMillis() / 1000).toUpperCase().getBytes();
+ calendar.setTime(tokenExpires);
+ ltpa.expires = Long.toHexString(calendar.getTimeInMillis() / 1000).toUpperCase().getBytes();
+ ltpa.user = canonicalUser.getBytes();
+ ltpa.canonicalUser = canonicalUser;
+ byte[] token = ltpa.header;
+ token = concatenate(token, ltpa.creation);
+ token = concatenate(token, ltpa.expires);
+ token = concatenate(token, ltpa.user);
+
+ MessageDigest md = null;
+ try {
+ md = MessageDigest.getInstance("SHA-1");
+ } catch (NoSuchAlgorithmException e) {
+ throw new RuntimeException(e);
+ }
+ md.update(token);
+ ltpa.digest = md.digest(base64decode(secret));
+ token = concatenate(token, ltpa.digest);
+
+ ltpa.ltpaToken = base64encode(token);
+ ltpa.rawToken = token;
+ ltpa.creationDate = tokenCreation;
+ ltpa.expiresDate = tokenExpires;
+ return ltpa; //new LtpaToken(base64encode(token));
+ }
+
+ private static byte[] concatenate(byte[] a, byte[] b) {
+ if (a == null) {
+ return b;
+ } else {
+ byte[] bytes = new byte[a.length + b.length];
+
+ System.arraycopy(a, 0, bytes, 0, a.length);
+ System.arraycopy(b, 0, bytes, a.length, b.length);
+ return bytes;
+ }
+ }
+
+ public static byte[] base64decode(String src) {
+ return java.util.Base64.getDecoder().decode(src.getBytes());
+ }
+
+ public static String base64encode(byte[] src) {
+ return new String(java.util.Base64.getEncoder().encode(src));
+ }
+
+ public Date getCreationDate() {
+ return creationDate;
+ }
+
+ public Date getExpiresDate() {
+ return expiresDate;
+ }
+
+ public String getCanonicalUser() {
+ return canonicalUser; //new String(user);
+ }
+
+ public String getUser() {
+ return LtpaToken.getCommonUser(canonicalUser); //LtpaTokenHandler.getCommonUser(new String(user));
+ }
+
+ public boolean isValid(String secret) {
+ Date now = new Date();
+ if (!(now.after(creationDate) && now.before(expiresDate))) return false;
+
+ byte[] bytes = header;
+ bytes = concatenate(bytes, creation);
+ bytes = concatenate(bytes, expires);
+ bytes = concatenate(bytes, user);
+ bytes = concatenate(bytes, base64decode(secret));
+ try {
+ return MessageDigest.isEqual(digest, MessageDigest.getInstance("SHA-1").digest(bytes));
+ } catch (NoSuchAlgorithmException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public static final String INVALID = "Invalid token signature.";
+ public static final String INVALID_CREATE_TIME = "Invalid token creation time.";
+ public static final String INVALID_EXPIRATION = "Token has expired.";
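+ // Validates the signature and the time window; 'transition' acts as an allowed clock-skew margin
+ // (generate(String, LtpaSetting) backdates creation by the same amount).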
+ public void validate(LtpaSetting setting) {
+ if (!this.isValid(setting.getSecret())) {
+ throw new RuntimeException(INVALID);
+ }
+// if (!setting.version.equals(this.getVersion())) {
+// throw new RuntimeException("Invalid token version: " + this.getVersion());
+// }
+ long timeCreation = this.getCreationDate().getTime();
+ long timeExpiration = this.getExpiresDate().getTime();
+ long now = System.currentTimeMillis();
+ if (timeCreation > (now + setting.getTransition() * 1000)) {
+ throw new RuntimeException(INVALID_CREATE_TIME);
+ }
+ if (timeExpiration < now || timeCreation > timeExpiration) {
+ throw new RuntimeException(INVALID_EXPIRATION);
+ }
+ }
+
+ public static LtpaToken generate(String canonicalUser, LtpaSetting setting) {
+ long now = System.currentTimeMillis();
+ return generate(canonicalUser, new Date(now - setting.getTransition() * 1000),
+ new Date(now + setting.getExpiration() * 1000),
+ setting.getSecret());
+ }
+
+ public String toString() {
+ return ltpaToken;
+ }
+
+ public String getLtpaToken() {
+ return ltpaToken;
+ }
+
+ public void setLtpaToken(String ltpaToken) {
+ this.ltpaToken = ltpaToken;
+ }
+
+ public String getVersion() {
+ return new String(this.header);
+ }
+}
\ No newline at end of file
diff --git a/dinky-admin/src/main/java/org/dinky/service/UserService.java b/dinky-admin/src/main/java/org/dinky/service/UserService.java
index b428dc5246..ef5f3f5b9c 100644
--- a/dinky-admin/src/main/java/org/dinky/service/UserService.java
+++ b/dinky-admin/src/main/java/org/dinky/service/UserService.java
@@ -80,6 +80,8 @@ public interface UserService extends ISuperService<User> {
*/
Result<UserDTO> loginUser(LoginDTO loginDTO);
+ Result<UserDTO> loginUser(User user, boolean autoLogin);
+
/**
* get user by username
*
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java
index 10ec4b90d2..4988ed5cb3 100644
--- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java
+++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java
@@ -21,6 +21,7 @@
import static org.dinky.data.model.SystemConfiguration.FLINK_JOB_ARCHIVE;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.dinky.assertion.Asserts;
import org.dinky.assertion.DinkyAssert;
import org.dinky.config.Dialect;
@@ -63,6 +64,7 @@
import org.dinky.explainer.lineage.LineageBuilder;
import org.dinky.explainer.lineage.LineageResult;
import org.dinky.explainer.sqllineage.SQLLineageBuilder;
+import org.dinky.function.FunctionFactory;
import org.dinky.function.compiler.CustomStringJavaCompiler;
import org.dinky.function.data.model.UDF;
import org.dinky.function.pool.UdfCodePool;
@@ -104,14 +106,7 @@
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Base64;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
+import java.util.*;
import java.util.stream.Collectors;
import javax.annotation.Resource;
@@ -134,7 +129,6 @@
import cn.dev33.satoken.stp.StpUtil;
import cn.hutool.core.bean.BeanUtil;
-import cn.hutool.core.exceptions.ExceptionUtil;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.lang.tree.Tree;
import cn.hutool.core.lang.tree.TreeNode;
@@ -577,28 +571,31 @@ public void initTenantByTaskId(Integer id) {
public boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) throws SqlExplainExcepition {
TaskDTO task = getTaskInfoById(taskId);
task.setStep(lifeCycle.getValue());
- if (lifeCycle == JobLifeCycle.PUBLISH) {
- Integer taskVersionId = taskVersionService.createTaskVersionSnapshot(task);
- task.setVersionId(taskVersionId);
- if (Dialect.isUDF(task.getDialect())) {
- // compile udf class
- try {
- UDF udf = UDFUtils.taskToUDF(task.buildTask());
- UdfCodePool.addOrUpdate(udf);
- } catch (Throwable e) {
- throw new BusException(
- "UDF compilation failed and cannot be published. The error message is as follows:"
- + ExceptionUtil.stacktraceToOneLineString(e),
- e);
+
+ if (Dialect.isUDF(task.getDialect())
+ && Asserts.isNotNull(task.getConfigJson())
+ && Asserts.isNotNull(task.getConfigJson().getUdfConfig())) {
+ // Clear the pool again before publishing, to cover abnormal cases: the class is already defined in memory while
+ // awaiting publish, or a previous offline did not clean up successfully (e.g. when multiple services are running)
+ UdfCodePool.remove(task.getConfigJson().getUdfConfig().getClassName());
+
+ if (lifeCycle == JobLifeCycle.PUBLISH) {
+ Integer taskVersionId = taskVersionService.createTaskVersionSnapshot(task);
+ task.setVersionId(taskVersionId);
+ if (Dialect.isUDF(task.getDialect())) {
+ // compile udf class
+ try {
+ // Must throw the exception here; do not silently patch the task status in the data
+ UDF udf = UDFUtils.taskToUDF(task.buildTask());
+ UdfCodePool.addOrUpdate(udf);
+ } catch (Throwable e) {
+ throw new BusException(
+ "UDF compilation failed and cannot be published. The error message is as follows:"
+ + ExceptionUtils.getStackTrace(e));
+ }
}
}
- } else {
- if (Dialect.isUDF(task.getDialect())
- && Asserts.isNotNull(task.getConfigJson())
- && Asserts.isNotNull(task.getConfigJson().getUdfConfig())) {
- UdfCodePool.remove(task.getConfigJson().getUdfConfig().getClassName());
- }
}
+
boolean saved = saveOrUpdate(task.buildTask());
if (saved && Asserts.isNotNull(task.getJobInstanceId())) {
JobInstance jobInstance = jobInstanceService.getById(task.getJobInstanceId());
@@ -616,6 +613,40 @@ public boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) thro
return saved;
}
+ public List<Task> getUDFByClassName(String className) {
+ return list(new LambdaQueryWrapper<Task>()
+ .in(Task::getDialect, Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue())
+ .eq(Task::getEnabled, 1)
+ .eq(Task::getStep, JobLifeCycle.PUBLISH.getValue())
+ .like(Task::getConfigJson, "\"className\":\"" + className + "\""));
+ }
+
+ /**
+ * used by: org.dinky.function.util.UDFUtil.toUDF
+ * When a task is published and the UDF is not in memory, look up the related published UDF task for the class
+ * and compile it; return an error only if none can be found.
+ */
+ public UDF addOrUpdateUdfCodePool(String className) {
+ List<Task> tasks = getUDFByClassName(className).stream()
+ .filter(task -> Asserts.isNotNull(task.getConfigJson().getUdfConfig()))
+ .collect(Collectors.toList());
+ switch (tasks.size()) {
+ case 1:
+ Task task = tasks.get(0);
+ UDF udf = UDFUtils.taskToUDF(task);
+ try {
+ FunctionFactory.initUDF(Collections.singletonList(udf), task.getId());
+ } catch (Throwable e) {
+ throw new BusException(
+ "UDF compilation failed and cannot be published. The error message is as follows:"
+ + ExceptionUtils.getStackTrace(e));
+ }
+ UdfCodePool.addOrUpdate(udf);
+ return udf;
+ case 0:
+ log.error("no published UDF task found for class [{}]", className);
+ break;
+ default:
+ log.error("found duplicate class [{}] , task names : {}", className,
+ tasks.stream().map(Task::getName).collect(Collectors.joining(", ")));
+ }
+ return null;
+ }
+
@Override
@Transactional(rollbackFor = Exception.class)
public boolean saveOrUpdateTask(Task task) {
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java
index e6dc26842f..655e015446 100644
--- a/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java
+++ b/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java
@@ -185,6 +185,11 @@ public Result loginUser(LoginDTO loginDTO) {
return Result.authorizeFailed(e.getStatus());
}
+ return loginUser(user, loginDTO.isAutoLogin());
+ }
+
+ @Override
+ public Result<UserDTO> loginUser(User user, boolean autoLogin) {
// Check if the user is enabled
if (!user.getEnabled()) {
loginLogService.saveLoginLog(user, Status.USER_DISABLED_BY_ADMIN);
@@ -199,7 +204,7 @@ public Result loginUser(LoginDTO loginDTO) {
// Perform login using StpUtil (Assuming it handles the session management)
Integer userId = user.getId();
- StpUtil.login(userId, loginDTO.isAutoLogin());
+ StpUtil.login(userId, autoLogin);
// save login log record
loginLogService.saveLoginLog(user, Status.LOGIN_SUCCESS);
@@ -323,6 +328,14 @@ private UserDTO refreshUserInfo(User user) {
public User getUserByUsername(String username) {
return getOne(new LambdaQueryWrapper<User>().eq(User::getUsername, username));
}
+//
+// @Override
+// public User getUserByUsernameOrId(String username) {
+// return getOne(
+// StringUtils.isNumeric(username) ? new LambdaQueryWrapper().eq(User::getUsername, username) :
+// new LambdaQueryWrapper().eq(User::getUsername, username)
+// .or().eq(User::getId, username).orderByAsc(User::getUsername));
+// }
@Override
@Transactional(rollbackFor = Exception.class)
diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java
index 5bb06d0ac7..1fd8840cb5 100644
--- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java
+++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java
@@ -19,6 +19,7 @@
package org.dinky.utils;
+import cn.hutool.core.util.StrUtil;
import org.dinky.assertion.Asserts;
import org.dinky.data.exception.BusException;
import org.dinky.data.model.Task;
@@ -26,11 +27,26 @@
import org.dinky.function.compiler.FunctionCompiler;
import org.dinky.function.compiler.FunctionPackage;
import org.dinky.function.data.model.UDF;
+import org.dinky.function.exception.UDFCompilerException;
import org.dinky.function.util.UDFUtil;
import org.apache.flink.table.catalog.FunctionLanguage;
+import java.util.stream.Stream;
+
public class UDFUtils extends UDFUtil {
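+ /** Like taskToUDF, but returns null instead of throwing, so bulk loads can skip tasks that fail to compile (see the Stream overload below). */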
+ public static UDF taskToUDFSilent(Task task) {
+ try {
+ return taskToUDF(task);
+ } catch (Exception e) {
+ e.printStackTrace();
+ return null;
+ }
+ }
+
+ public static Stream taskToUDFSilent(Stream tasks) {
+ return tasks.map(UDFUtils::taskToUDFSilent).filter(Asserts::isNotNull);
+ }
public static UDF taskToUDF(Task task) {
if (Asserts.isNotNull(task.getConfigJson())
@@ -40,8 +56,14 @@ public static UDF taskToUDF(Task task) {
.code(task.getStatement())
.functionLanguage(FunctionLanguage.valueOf(task.getDialect().toUpperCase()))
.build();
-
- FunctionCompiler.getCompilerByTask(udf, task.getConfigJson().getCustomConfigMaps(), task.getId());
+ try {
+ if (!FunctionCompiler.getCompilerByTask(udf, task.getConfigJson().getCustomConfigMaps(), task.getId())) {
+ throw new UDFCompilerException(StrUtil.format(
+ "codeLanguage:{} , className:{} compilation failed", udf.getFunctionLanguage(), udf.getClassName()));
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
FunctionPackage.bale(udf, task.getId());
return udf;
} else {
diff --git a/dinky-admin/src/main/resources/application-mysql.yml b/dinky-admin/src/main/resources/application-mysql.yml
index 6c71564216..fbb1cc5cc1 100644
--- a/dinky-admin/src/main/resources/application-mysql.yml
+++ b/dinky-admin/src/main/resources/application-mysql.yml
@@ -17,7 +17,7 @@
spring:
datasource:
- url: jdbc:mysql://${MYSQL_ADDR:127.0.0.1:3306}/${MYSQL_DATABASE:dinky}?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&useSSL=false&zeroDateTimeBehavior=convertToNull&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
- username: ${MYSQL_USERNAME:dinky}
- password: ${MYSQL_PASSWORD:dinky}
+ url: jdbc:mysql://${MYSQL_ADDR:hadoop801:3306}/${MYSQL_DATABASE:dinky10}?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&useSSL=false&zeroDateTimeBehavior=convertToNull&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
+ username: ${MYSQL_USERNAME:root}
+ password: ${MYSQL_PASSWORD:qwer1234}
driver-class-name: com.mysql.cj.jdbc.Driver
diff --git a/dinky-admin/src/main/resources/application.properties b/dinky-admin/src/main/resources/application.properties
index 8c978aabec..e429787eaa 100644
--- a/dinky-admin/src/main/resources/application.properties
+++ b/dinky-admin/src/main/resources/application.properties
@@ -84,3 +84,11 @@ knife4j.enable=true
knife4j.setting.enable-footer=false
knife4j.setting.enable-footer-custom=true
knife4j.setting.footer-custom-content=Apache License 2.0 | Copyright \u00A9 2023 Dinky, Inc. DataLinkDC. [\u9C81ICP\u590720001630\u53F7-2](https://beian.miit.gov.cn)
+
+spring.autoconfigure.exclude= \
+ org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration, \
+ org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration
+
+
+ltpa.dinky-ltpa.secret = 2OqzZkZ//RvOLF+X1HqNJWcCBHE=
+ltpa.dinky-ltpa.expiration = 86400
\ No newline at end of file
diff --git a/dinky-admin/src/main/resources/application.yml b/dinky-admin/src/main/resources/application.yml
index a71804ab9e..6ce7f505ac 100644
--- a/dinky-admin/src/main/resources/application.yml
+++ b/dinky-admin/src/main/resources/application.yml
@@ -16,7 +16,7 @@ spring:
# If you use pgsql database, please configure pgsql database connection information in application-postgresql.yml
# If you use the h2 database, please configure the h2 database connection information in application-h2.yml,
# note: the h2 database is only for experience use, and the related data that has been created cannot be migrated, please use it with caution
- active: ${DB_ACTIVE:h2} #[h2,mysql,postgresql]
+ active: ${DB_ACTIVE:mysql} #[h2,mysql,pgsql]
include:
- jmx
- flyway
diff --git a/dinky-admin/src/main/resources/log4j2.xml b/dinky-admin/src/main/resources/log4j2.xml
index 088ef44587..656b69590b 100644
--- a/dinky-admin/src/main/resources/log4j2.xml
+++ b/dinky-admin/src/main/resources/log4j2.xml
@@ -82,5 +82,15 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/dinky-admin/src/test/java/org/apache/flink/table/test/TestDemo1UDF.java b/dinky-admin/src/test/java/org/apache/flink/table/test/TestDemo1UDF.java
new file mode 100644
index 0000000000..6b8daac942
--- /dev/null
+++ b/dinky-admin/src/test/java/org/apache/flink/table/test/TestDemo1UDF.java
@@ -0,0 +1,19 @@
+package org.apache.flink.table.test;
+
+import org.apache.flink.table.test.utils.TestUtil;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Author: lwjhn
+ * Date: 2023/10/19 14:05
+ * Description:
+ */
+
+@Ignore
+public class TestDemo1UDF {
+ @Test
+ public void test1() {
+ TestUtil.executeSql("./sql/TestDemoUnnest.sql");
+ }
+}
diff --git a/dinky-admin/src/test/java/org/apache/flink/table/test/utils/SqlType.java b/dinky-admin/src/test/java/org/apache/flink/table/test/utils/SqlType.java
new file mode 100644
index 0000000000..522488cc54
--- /dev/null
+++ b/dinky-admin/src/test/java/org/apache/flink/table/test/utils/SqlType.java
@@ -0,0 +1,74 @@
+package org.apache.flink.table.test.utils;
+
+import java.util.regex.Pattern;
+
+/**
+ * Author: lwjhn
+ * Date: 2024/6/15 10:21
+ * Description:
+ */
+public enum SqlType {
+ SELECT("SELECT", "^SELECT.*"),
+
+ CREATE("CREATE", "^CREATE(?!\\s+TABLE.*AS SELECT).*$"),
+
+ DROP("DROP", "^DROP.*"),
+
+ ALTER("ALTER", "^ALTER.*"),
+
+ INSERT("INSERT", "^INSERT.*"),
+
+ DESC("DESC", "^DESC.*"),
+
+ DESCRIBE("DESCRIBE", "^DESCRIBE.*"),
+
+ EXPLAIN("EXPLAIN", "^EXPLAIN.*"),
+
+ USE("USE", "^USE.*"),
+
+ SHOW("SHOW", "^SHOW.*"),
+
+ LOAD("LOAD", "^LOAD.*"),
+
+ UNLOAD("UNLOAD", "^UNLOAD.*"),
+
+ SET("SET", "^SET.*"),
+
+ RESET("RESET", "^RESET.*"),
+
+ EXECUTE("EXECUTE", "^EXECUTE.*"),
+
+ ADD_JAR("ADD_JAR", "^ADD\\s+JAR\\s+\\S+"),
+
+ ADD("ADD", "^ADD\\s+CUSTOMJAR\\s+\\S+"),
+
+ ADD_FILE("ADD_FILE", "^ADD\\s+FILE\\s+\\S+"),
+
+ PRINT("PRINT", "^PRINT.*"),
+
+ CTAS("CTAS", "^CREATE\\s.*AS\\sSELECT.*$"),
+
+ WITH("WITH", "^WITH.*"),
+
+ UNKNOWN("UNKNOWN", "^UNKNOWN.*");
+
+ private String type;
+ private Pattern pattern;
+
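+ // Patterns are compiled CASE_INSENSITIVE and DOTALL so multi-line statements still match on their leading keyword.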
+ SqlType(String type, String regexp) {
+ this.type = type;
+ this.pattern = Pattern.compile(regexp, Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public boolean match(String statement) {
+ return pattern.matcher(statement).matches();
+ }
+}
diff --git a/dinky-admin/src/test/java/org/apache/flink/table/test/utils/SqlUtil.java b/dinky-admin/src/test/java/org/apache/flink/table/test/utils/SqlUtil.java
new file mode 100644
index 0000000000..a92a11b745
--- /dev/null
+++ b/dinky-admin/src/test/java/org/apache/flink/table/test/utils/SqlUtil.java
@@ -0,0 +1,87 @@
+package org.apache.flink.table.test.utils;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Map;
+import java.util.regex.Pattern;
+
+/**
+ * Author: lwjhn
+ * Date: 2024/5/30 11:30
+ * Description:
+ */
+public class SqlUtil {
+
+ private static final String SEMICOLON = ";";
+ private static final String SQL_SEPARATOR = ";\\s*(?:\\n|--.*)";
+
+ private SqlUtil() {}
+
+ public static String[] getStatements(String sql) {
+ return getStatements(sql, SQL_SEPARATOR);
+ }
+
+ public static String[] getStatements(String sql, String sqlSeparator) {
+ if (StringUtils.isBlank(sql)) {
+ return new String[0];
+ }
+
+ String[] splits = sql.replace("\r\n", "\n").split(sqlSeparator);
+ String lastStatement = splits[splits.length - 1].trim();
+ if (lastStatement.endsWith(SEMICOLON)) {
+ splits[splits.length - 1] = lastStatement.substring(0, lastStatement.length() - 1);
+ }
+
+ return splits;
+ }
+
+ public static String removeNote(String sql) {
+
+ if (StringUtils.isNotBlank(sql)) {
+ // Replace non-breaking spaces and collapse consecutive line breaks
+ sql = sql.replaceAll("\u00A0", " ").replaceAll("[\r\n]+", "\n");
+ // Strip '--' line comments and '/* */' block comments (hints such as '/*+ ... */' are kept);
+ // quoted strings are captured and written back via $1 so their contents are untouched
+ Pattern p = Pattern.compile("(?ms)('(?:''|[^'])*')|--.*?$|/\\*[^+].*?\\*/");
+ String result = p.matcher(sql).replaceAll("$1");
+ return result.trim();
+ }
+ return sql;
+ }
+
+ public static String[] preparedStatement(String sql) {
+ return getStatements(removeNote(sql));
+ }
+
+ /*
+ public static String replaceAllParam(String sql, String name, String value) {
+ return sql.replaceAll("#\\{" + name + "\\}", value);
+ }*/
+
+ public static String replaceAllParam(String sql, Map<String, String> values) {
+ if (StringUtils.isBlank(sql)) {
+ return "";
+ }
+ for (Map.Entry<String, String> entry : values.entrySet()) {
+ sql = replaceAllParam(sql, entry.getKey(), entry.getValue());
+ }
+ return sql;
+ }
+
+ public static String replaceAllParam(String sql, String name, String value) {
+ return sql.replaceAll("\\$\\{" + name + "}", value);
+ }
+
+
+ public static String addLineNumber(String input) {
+ String[] lines = input.split("\n");
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < lines.length; i++) {
+ sb.append(String.format("%-4d", i + 1));
+ sb.append(" ");
+ sb.append(lines[i]);
+ sb.append("\n");
+ }
+ return sb.toString();
+ }
+}
diff --git a/dinky-admin/src/test/java/org/apache/flink/table/test/utils/TestUtil.java b/dinky-admin/src/test/java/org/apache/flink/table/test/utils/TestUtil.java
new file mode 100644
index 0000000000..6b3d116309
--- /dev/null
+++ b/dinky-admin/src/test/java/org/apache/flink/table/test/utils/TestUtil.java
@@ -0,0 +1,82 @@
+package org.apache.flink.table.test.utils;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.streaming.api.CheckpointingMode;
+import org.apache.flink.streaming.api.environment.CheckpointConfig;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Author: lwjhn
+ * Date: 2024/5/30 10:11
+ * Description:
+ */
+public class TestUtil {
+ public static StreamExecutionEnvironment getLocalExecutionEnvironment(){
+ // StreamTableEnvironment tEnv = StreamTableEnvironment.create(StreamExecutionEnvironment.createLocalEnvironment());
+ StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+// "decimal.handling.mode", "string"
+ env.enableCheckpointing(10000);
+ // Checkpointing mode exactly-once (the default)
+ env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
+ // Require at least 500 ms between checkpoints (minimum pause)
+ env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
+ // Checkpoints must complete within 10 minutes or be discarded (checkpoint timeout)
+ env.getCheckpointConfig().setCheckpointTimeout(600000);
+ // Allow only one checkpoint in flight at a time
+ env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
+ // Retain checkpoint data on cancel so the job can be restored from a chosen checkpoint
+ env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
+ // scan.incremental.snapshot.enabled
+ env.getCheckpointConfig().setCheckpointStorage("file:///dist/test/checkpoint");
+ return env;
+ }
+
+ public static StreamTableEnvironment getLocalStreamTableEnvironment(){
+ return StreamTableEnvironment.create(getLocalExecutionEnvironment());
+ }
+
+ public static String loadFile(String path){
+ try (InputStream inputStream =
+ Thread.currentThread().getContextClassLoader().getResourceAsStream(path)){
+ assert inputStream != null;
+ return IOUtils.toString(inputStream, StandardCharsets.UTF_8);
+ /*
+ String text = new BufferedReader(
+ new InputStreamReader(Objects.requireNonNull(TestUtil.class.getResourceAsStream(path)), StandardCharsets.UTF_8))
+ .lines()
+ .collect(Collectors.joining("\n"));
+ */
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+
+ }
+
+ public static String[] getStatements(String path){
+ return SqlUtil.preparedStatement(loadFile(path));
+ }
+
+ public static void executeSql(String path) {
+ StreamTableEnvironment environment = getLocalStreamTableEnvironment();
+ String[] statements = TestUtil.getStatements(path);
+ for (String statement : statements) {
+ if (StringUtils.isBlank(statement = statement.trim())) {
+ continue;
+ }
+ System.out.printf("%nFlink SQL [%d]> %s;%n", System.currentTimeMillis(), statement);
+ environment.executeSql(statement).print();
+ }
+ System.out.printf("[%d] END...%n", System.currentTimeMillis());
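+ // Park the main thread indefinitely (the latch is never counted down) so the local streaming job keeps running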
+ try {
+ new CountDownLatch(1).await();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/dinky-admin/src/test/java/org/dinky/admin/AdminTest.java b/dinky-admin/src/test/java/org/dinky/admin/AdminTest.java
index 0b1a8b4455..ceddf805a6 100644
--- a/dinky-admin/src/test/java/org/dinky/admin/AdminTest.java
+++ b/dinky-admin/src/test/java/org/dinky/admin/AdminTest.java
@@ -19,11 +19,20 @@
package org.dinky.admin;
+import cn.dev33.satoken.secure.SaSecureUtil;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.jasper.tagplugins.jstl.core.Url;
+import org.assertj.core.util.Arrays;
+import org.dinky.ltpa.config.LtpaSetting;
+import org.dinky.ltpa.token.LtpaToken;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
-import cn.dev33.satoken.secure.SaSecureUtil;
+import java.io.IOException;
+import java.net.*;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
/**
* SqlParserTest
@@ -31,6 +40,7 @@
* @since 2021/6/14 17:03
*/
@Ignore
+@Slf4j
public class AdminTest {
@Test
@@ -38,4 +48,53 @@ public void adminTest() {
String admin = SaSecureUtil.md5("admin");
Assert.assertEquals("21232f297a57a5a743894a0e4a801fc3", admin);
}
+
+ @Test
+ public void ltpaTest() {
+ LtpaToken token = ltpaToken();
+ log.info("\n\n cookie > Set-Cookie: tenantId=1; Expires={}; Path=/", token.getExpiresDate());
+ log.info("\n\n cookie > Set-Cookie: dinky-ltpa={}; Expires={}; Path=/\n\n", token, token.getExpiresDate());
+ }
+
+ public LtpaToken ltpaToken(){
+ LtpaSetting setting = new LtpaSetting();
+ setting.setSecret("2OqzZkZ//RvOLF+X1HqNJWcCBHE=");
+ setting.setExpiration(86400); // 24h
+ return LtpaToken.generate("1", setting);
+ }
+
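+ // End-to-end sketch: call an API using only the generated dinky-ltpa cookie for authentication;
+ // assumes a local server listening on port 8888.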
+ @Test
+ public void ltpaRequestTest() throws IOException {
+ URL url = new URL("http://localhost:8888/api/task?id=343");
+ CookieManager cookieManager = new CookieManager();
+ cookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ALL);
+ CookieHandler.setDefault(cookieManager);
+
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ try {
+ conn.setRequestMethod("GET");
+ conn.setRequestProperty("Accept", "application/json, text/plain, */*");
+ conn.setRequestProperty("Connection", "keep-alive");
+ conn.setRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.0.0");
+ conn.setRequestProperty("Cookie", "language=zh-CN; tenantId=1; dinky-ltpa="+ ltpaToken().toString());
+ conn.setInstanceFollowRedirects(true);
+
+ int responseCode = conn.getResponseCode();
+ if (responseCode == HttpURLConnection.HTTP_OK) {
+ java.io.BufferedReader in = new java.io.BufferedReader(new java.io.InputStreamReader(conn.getInputStream()));
+ String inputLine;
+ StringBuffer response = new StringBuffer();
+ while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+ }
+ in.close();
+
+ System.out.println("Response: " + response);
+ } else {
+ System.out.println("HTTP response code: " + responseCode);
+ }
+ } finally {
+ conn.disconnect();
+ }
+ }
}
diff --git a/dinky-admin/src/test/java/org/dinky/admin/FlinkStreamRuleSetsTest.scala b/dinky-admin/src/test/java/org/dinky/admin/FlinkStreamRuleSetsTest.scala
new file mode 100644
index 0000000000..54557c368d
--- /dev/null
+++ b/dinky-admin/src/test/java/org/dinky/admin/FlinkStreamRuleSetsTest.scala
@@ -0,0 +1,34 @@
+package org.dinky.admin
+
+import org.apache.calcite.tools.RuleSets
+import org.apache.flink.table.planner.plan.rules.FlinkStreamRuleSets
+import org.junit.Assert._
+import org.junit.Test
+import scala.collection.JavaConverters._
+
+
+class FlinkStreamRuleSetsTest {
+
+ @Test
+ def testRuleSetLoading(): Unit = {
+ try {
+ // Load a sample RuleSet
+ val ruleSet = FlinkStreamRuleSets.DEFAULT_REWRITE_RULES
+
+ // Verify the rule set is present and non-empty
+ assertNotNull("RuleSet should not be null", ruleSet)
+ val rules = ruleSet.iterator().asScala.toList
+ assertTrue("RuleSet should contain rules", rules.nonEmpty)
+
+ // Print the rule names
+ println("Loaded RuleSet with the following rules:")
+ rules.foreach(rule => println(rule.getClass.getName))
+
+ } catch {
+ case e: Exception =>
+ // Print the error details
+ e.printStackTrace()
+ fail(s"Failed to load RuleSet: ${e.getMessage}")
+ }
+ }
+}
diff --git a/dinky-admin/src/test/resources/sql/TestDemoUnnest.sql b/dinky-admin/src/test/resources/sql/TestDemoUnnest.sql
new file mode 100644
index 0000000000..8d2233dccb
--- /dev/null
+++ b/dinky-admin/src/test/resources/sql/TestDemoUnnest.sql
@@ -0,0 +1,58 @@
+CREATE TABLE json_table (
+ total BIGINT,
+ `data` ARRAY>,
+ `page` BIGINT,
+ `rows` BIGINT
+)
+WITH
+ (
+ 'connector' = 'filesystem',
+ 'path' = 'hdfs:///userFiles/U0000001/2018record3.jsons',
+ 'format' = 'json'
+ );
+
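+-- CTAS into a blackhole sink: exercises UNNEST over the JSON array field without persisting any output.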
+CREATE TABLE `default_catalog`.`default_database`.szt_data1 WITH (
+ 'connector' = 'blackhole'
+) AS
+SELECT
+ total,
+ data_row.d1,
+ data_row.close_date,
+ data_row.card_no1,
+ data_row.deal_value,
+ data_row.deal_type,
+ data_row.company_name,
+ data_row.car_no,
+ data_row.station,
+ data_row.conn_mark,
+ data_row.deal_money,
+ data_row.equ_no,
+ page,
+ `rows`
+FROM
+ json_table,
+ UNNEST(json_table.data) AS data_row (
+ d1,
+ close_date,
+ card_no1,
+ deal_value,
+ deal_type,
+ company_name,
+ car_no,
+ station,
+ conn_mark,
+ deal_money,
+ equ_no
+ );
\ No newline at end of file
diff --git a/dinky-alert/dinky-alert-feishu/pom.xml b/dinky-alert/dinky-alert-feishu/pom.xml
index 4999e07713..ae2ff6edb7 100644
--- a/dinky-alert/dinky-alert-feishu/pom.xml
+++ b/dinky-alert/dinky-alert-feishu/pom.xml
@@ -52,5 +52,9 @@
<artifactId>junit</artifactId>
<scope>test</scope>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-alert-base</artifactId>
+ </dependency>
diff --git a/dinky-alert/dinky-alert-http/pom.xml b/dinky-alert/dinky-alert-http/pom.xml
index ed7bb76845..8397efcd95 100644
--- a/dinky-alert/dinky-alert-http/pom.xml
+++ b/dinky-alert/dinky-alert-http/pom.xml
@@ -28,6 +28,10 @@
<artifactId>junit</artifactId>
<scope>test</scope>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-alert-base</artifactId>
+ </dependency>
diff --git a/dinky-app/dinky-app-base/pom.xml b/dinky-app/dinky-app-base/pom.xml
index c4cad0dd62..f9c8c309fe 100644
--- a/dinky-app/dinky-app-base/pom.xml
+++ b/dinky-app/dinky-app-base/pom.xml
@@ -209,6 +209,36 @@
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-dm</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>*</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-kingbase</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>*</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-trino</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>*</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
diff --git a/dinky-assembly/src/main/assembly/package.xml b/dinky-assembly/src/main/assembly/package.xml
index 56a1da3591..ee774d3b38 100644
--- a/dinky-assembly/src/main/assembly/package.xml
+++ b/dinky-assembly/src/main/assembly/package.xml
@@ -247,6 +247,30 @@
<include>dinky-metadata-presto-${project.version}.jar</include>
+ <fileSet>
+ <directory>${project.parent.basedir}/dinky-metadata/dinky-metadata-dm/target</directory>
+ <outputDirectory>lib</outputDirectory>
+ <includes>
+ <include>dinky-metadata-dm-${project.version}.jar</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${project.parent.basedir}/dinky-metadata/dinky-metadata-kingbase/target</directory>
+ <outputDirectory>lib</outputDirectory>
+ <includes>
+ <include>dinky-metadata-kingbase-${project.version}.jar</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${project.parent.basedir}/dinky-metadata/dinky-metadata-trino/target</directory>
+ <outputDirectory>lib</outputDirectory>
+ <includes>
+ <include>dinky-metadata-trino-${project.version}.jar</include>
+ </includes>
+ </fileSet>
<directory>${project.parent.basedir}/dinky-alert/dinky-alert-dingtalk/target</directory>
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/AbstractCDCBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/AbstractCDCBuilder.java
index d45d6f91f9..a8d6dee6a7 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/AbstractCDCBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/AbstractCDCBuilder.java
@@ -29,6 +29,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.regex.Pattern;
public abstract class AbstractCDCBuilder implements CDCBuilder {
@@ -56,12 +57,12 @@ public List getSchemaList() {
String[] schemas = schema.split(FlinkParamConstant.SPLIT);
Collections.addAll(schemaList, schemas);
}
-
+ Pattern pattern = Pattern.compile("\\W");
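+ // A table prefix containing non-word characters (e.g. a regex table pattern) is not a plain schema name, so it is skipped below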
getTableList().stream()
.map(String::trim)
.filter(tableName -> Asserts.isNotNullString(tableName) && tableName.contains("."))
.map(tableName -> tableName.split("\\\\."))
- .filter(names -> !schemaList.contains(names[0]))
+ .filter(names -> !pattern.matcher(names[0]).find() && !schemaList.contains(names[0]))
.forEach(names -> schemaList.add(names[0]));
return schemaList;
}
@@ -92,7 +93,7 @@ public Map> parseMetaDataConfigs() {
public Map parseMetaDataSingleConfig(String url) {
Map configMap = new HashMap<>();
- configMap.put(ClientConstant.METADATA_NAME, url);
+ configMap.put(ClientConstant.METADATA_NAME, getStandardName(url));
configMap.put(ClientConstant.METADATA_URL, url);
configMap.put(ClientConstant.METADATA_TYPE, getMetadataType());
configMap.put(ClientConstant.METADATA_USERNAME, config.getUsername());
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/CDCBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/CDCBuilder.java
index f850f8348f..ebc3b63ea4 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/CDCBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/CDCBuilder.java
@@ -47,4 +47,8 @@ public interface CDCBuilder {
default Map parseMetaDataConfig() {
throw new SplitTableException("This data source does not implement database/table sharding");
}
+
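+ // Normalize a name (e.g. a JDBC URL) into an identifier-safe string; used when composing metadata config names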
+ default String getStandardName(String name) {
+ return name.replaceAll("\\W", "_");
+ }
}
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/CDCBuilderFactory.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/CDCBuilderFactory.java
index d223a86109..9fa1a81cc4 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/CDCBuilderFactory.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/CDCBuilderFactory.java
@@ -20,6 +20,8 @@
package org.dinky.cdc;
import org.dinky.assertion.Asserts;
+import org.dinky.cdc.dm.DmCDCBuilder;
+import org.dinky.cdc.kingbase.KingbaseCDCBuilder;
import org.dinky.cdc.mysql.MysqlCDCBuilder;
import org.dinky.cdc.oracle.OracleCDCBuilder;
import org.dinky.cdc.postgres.PostgresCDCBuilder;
@@ -41,6 +43,8 @@ private CDCBuilderFactory() {}
.put(OracleCDCBuilder.KEY_WORD, OracleCDCBuilder::new)
.put(PostgresCDCBuilder.KEY_WORD, PostgresCDCBuilder::new)
.put(SqlServerCDCBuilder.KEY_WORD, SqlServerCDCBuilder::new)
+ .put(KingbaseCDCBuilder.KEY_WORD, KingbaseCDCBuilder::new)
+ .put(DmCDCBuilder.KEY_WORD, DmCDCBuilder::new)
.build();
public static CDCBuilder buildCDCBuilder(FlinkCDCConfig config) {
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/convert/DataTypeConverter.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/convert/DataTypeConverter.java
index a1f80fde30..e07105d1a8 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/convert/DataTypeConverter.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/convert/DataTypeConverter.java
@@ -19,6 +19,7 @@
package org.dinky.cdc.convert;
+import org.apache.flink.table.types.logical.utils.LogicalTypeParser;
import org.dinky.assertion.Asserts;
import org.dinky.data.model.Column;
@@ -49,6 +50,7 @@
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
+import java.util.Optional;
import java.util.concurrent.TimeUnit;
import javax.xml.bind.DatatypeConverter;
@@ -61,6 +63,8 @@ public class DataTypeConverter {
public static final long MILLIS_PER_DAY = 86400000L; // = 24 * 60 * 60 * 1000
public static LogicalType getLogicalType(Column column) {
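+ // Parse the column's declared Flink type string with Flink's LogicalTypeParser instead of the hand-written mapping below (now commented out)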
+ return LogicalTypeParser.parse(column.getFlinkType(), DataTypeConverter.class.getClassLoader());
+ /*
switch (column.getJavaType()) {
case BOOLEAN:
case JAVA_LANG_BOOLEAN:
@@ -111,13 +115,14 @@ public static LogicalType getLogicalType(Column column) {
case STRING:
default:
return new VarCharType(Asserts.isNull(column.getLength()) ? Integer.MAX_VALUE : column.getLength());
- }
+ }*/
}
public static Object convertToRow(Object value, LogicalType logicalType, ZoneId timeZone) {
if (Asserts.isNull(value)) {
return null;
}
+
switch (logicalType.getTypeRoot()) {
case BOOLEAN:
return convertToBoolean(value);
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/DebeziumCustomConverter.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/DebeziumCustomConverter.java
index 1f6e781126..67aa1b4968 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/DebeziumCustomConverter.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/DebeziumCustomConverter.java
@@ -19,7 +19,7 @@
package org.dinky.cdc.debezium;
-import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
+import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
import java.time.ZoneId;
import java.time.ZoneOffset;
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/MysqlDebeziumConverter.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/MysqlDebeziumConverter.java
index 464f74f929..254bc8875d 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/MysqlDebeziumConverter.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/MysqlDebeziumConverter.java
@@ -21,7 +21,7 @@
import org.dinky.cdc.debezium.DebeziumCustomConverter;
-import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
+import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
import java.time.Instant;
import java.time.ZoneOffset;
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/OracleDebeziumConverter.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/OracleDebeziumConverter.java
index ff1b7df1f5..84a30caab5 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/OracleDebeziumConverter.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/OracleDebeziumConverter.java
@@ -21,7 +21,7 @@
import org.dinky.cdc.debezium.DebeziumCustomConverter;
-import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
+import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
import io.debezium.spi.converter.RelationalColumn;
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/PostgresDebeziumConverter.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/PostgresDebeziumConverter.java
index cfb6115b45..0e88b20c47 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/PostgresDebeziumConverter.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/PostgresDebeziumConverter.java
@@ -19,10 +19,8 @@
package org.dinky.cdc.debezium.converter;
+import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
import org.dinky.cdc.debezium.DebeziumCustomConverter;
-
-import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
-
import io.debezium.spi.converter.RelationalColumn;
/**
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/SqlServerDebeziumConverter.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/SqlServerDebeziumConverter.java
index a3ddef78c5..d5d2588bb2 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/SqlServerDebeziumConverter.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/debezium/converter/SqlServerDebeziumConverter.java
@@ -21,7 +21,7 @@
import org.dinky.cdc.debezium.DebeziumCustomConverter;
-import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
+import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
import java.time.ZoneOffset;
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/dm/DmCDCBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/dm/DmCDCBuilder.java
new file mode 100644
index 0000000000..7d57be898d
--- /dev/null
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/dm/DmCDCBuilder.java
@@ -0,0 +1,131 @@
+package org.dinky.cdc.dm;
+
+/**
+ * Author: lwjhn
+ * Date: 2024/3/19 16:30
+ * Description:
+ */
+
+import com.ververica.cdc.connectors.base.options.StartupOptions;
+import com.ververica.cdc.connectors.dm.source.DmSourceBuilder;
+import com.ververica.cdc.debezium.GenericJsonDebeziumDeserializationSchema;
+import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.dinky.assertion.Asserts;
+import org.dinky.cdc.AbstractCDCBuilder;
+import org.dinky.cdc.CDCBuilder;
+import org.dinky.constant.FlinkParamConstant;
+import org.dinky.data.model.FlinkCDCConfig;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+public class DmCDCBuilder extends AbstractCDCBuilder {
+
+ public static final String KEY_WORD = "dm-cdc";
+ private static final String METADATA_TYPE = "dm";
+
+ public DmCDCBuilder() {
+ }
+
+ public DmCDCBuilder(FlinkCDCConfig config) {
+ super(config);
+ }
+
+ @Override
+ public String getHandle() {
+ return KEY_WORD;
+ }
+
+ @Override
+ public CDCBuilder create(FlinkCDCConfig config) {
+ return new DmCDCBuilder(config);
+ }
+
+ @Override
+ public DataStreamSource<String> build(StreamExecutionEnvironment env) {
+ Properties debeziumProperties = new Properties(); // defaults for some type conversions
+ debeziumProperties.setProperty("bigint.unsigned.handling.mode", "long");
+ debeziumProperties.setProperty("decimal.handling.mode", "string");
+
+ for (Map.Entry<String, String> entry : config.getDebezium().entrySet()) {
+ if (Asserts.isNotNullString(entry.getKey()) && Asserts.isNotNullString(entry.getValue())) {
+ debeziumProperties.setProperty(entry.getKey(), entry.getValue());
+ }
+ }
+ DmSourceBuilder sourceBuilder = new DmSourceBuilder()
+ .hostname(config.getHostname())
+ .port(config.getPort())
+ .databaseList(config.getDatabase())
+ .username(config.getUsername())
+ .password(config.getPassword())
+ .deserializer(new GenericJsonDebeziumDeserializationSchema())
+ .includeSchemaChanges(true)
+ .debeziumProperties(debeziumProperties);
+
+ String schema = config.getSchema();
+ if (Asserts.isNotNullString(schema)) {
+ String[] schemas = schema.split(FlinkParamConstant.SPLIT);
+ sourceBuilder.schemaList(schemas);
+ } else {
+ sourceBuilder.schemaList(new String[0]);
+ }
+ List<String> schemaTableNameList = config.getSchemaTableNameList();
+ if (Asserts.isNotNullCollection(schemaTableNameList)) {
+ sourceBuilder.tableList(schemaTableNameList.toArray(new String[schemaTableNameList.size()]));
+ } else {
+ sourceBuilder.tableList(new String[0]);
+ }
+
+ if (Asserts.isNotNullString(config.getStartupMode())) {
+ switch (config.getStartupMode().toLowerCase()) {
+ case "initial":
+ sourceBuilder.startupOptions(StartupOptions.initial());
+ break;
+ case "latest-offset":
+ sourceBuilder.startupOptions(StartupOptions.latest());
+ break;
+ default:
+ }
+ } else {
+ sourceBuilder.startupOptions(StartupOptions.latest());
+ }
+ return env.fromSource(sourceBuilder.build(), WatermarkStrategy.noWatermarks(), "Dameng CDC Source");
+ }
+
+ @Override
+ public String getSchema() {
+ return config.getSchema();
+ }
+
+ @Override
+ protected String getMetadataType() {
+ return METADATA_TYPE;
+ }
+
+ @Override
+ protected String generateUrl(String schema) {
+ return String.format(
+ "jdbc:dm://%s:%d/%s%s",
+ config.getHostname(), config.getPort(), schema, composeJdbcProperties(config.getJdbc()));
+ }
+
+ private String composeJdbcProperties(Map<String, String> jdbcProperties) {
+ if (jdbcProperties == null || jdbcProperties.isEmpty()) {
+ return "";
+ }
+
+ StringBuilder sb = new StringBuilder();
+ sb.append('?');
+ jdbcProperties.forEach((k, v) -> {
+ sb.append(k);
+ sb.append("=");
+ sb.append(v);
+ sb.append("&");
+ });
+ sb.deleteCharAt(sb.length() - 1);
+ return sb.toString();
+ }
+}
\ No newline at end of file
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/kingbase/KingbaseCDCBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/kingbase/KingbaseCDCBuilder.java
new file mode 100644
index 0000000000..a1a91ea644
--- /dev/null
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/kingbase/KingbaseCDCBuilder.java
@@ -0,0 +1,82 @@
+package org.dinky.cdc.kingbase;
+
+import com.ververica.cdc.connectors.kingbase.KingbaseSource;
+import com.ververica.cdc.debezium.DebeziumSourceFunction;
+import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
+import org.apache.commons.lang3.StringUtils;
+import org.dinky.assertion.Asserts;
+import org.dinky.cdc.CDCBuilder;
+import org.dinky.cdc.postgres.PostgresCDCBuilder;
+import org.dinky.data.model.FlinkCDCConfig;
+
+import java.util.List;
+import java.util.Properties;
+
+/**
+ * Author: lwjhn
+ * Date: 2023/8/24 14:34
+ * Description:
+ */
+public class KingbaseCDCBuilder extends PostgresCDCBuilder {
+ public static final String KEY_WORD = "kingbase-cdc";
+
+ public KingbaseCDCBuilder() {
+ }
+
+ public KingbaseCDCBuilder(FlinkCDCConfig config) {
+ super(config);
+ }
+
+ @Override
+ public String getHandle() {
+ return KEY_WORD;
+ }
+
+ @Override
+ public String getMetadataType() {
+ return "kingbase";
+ }
+
+ @Override
+ public String getSourceName() {
+ return "Kingbase CDC Source";
+ }
+
+ @Override
+ public String getDriverPrefix() {
+ return "jdbc:kingbase8";
+ }
+
+ @Override
+ public CDCBuilder create(FlinkCDCConfig config) {
+ return new KingbaseCDCBuilder(config);
+ }
+
+
+ public DebeziumSourceFunction<String> createSourceFunction(Properties debeziumProperties, String decodingPluginName, String slotName) {
+ KingbaseSource.Builder sourceBuilder = KingbaseSource.builder()
+ .hostname(config.getHostname())
+ .port(config.getPort())
+ .username(config.getUsername())
+ .password(config.getPassword())
+ .database(config.getDatabase());
+
+ List<String> schemaTableNameList = config.getSchemaTableNameList();
+ if (Asserts.isNotNullCollection(schemaTableNameList)) {
+ sourceBuilder.tableList(schemaTableNameList.toArray(new String[0]));
+ } else {
+ sourceBuilder.tableList();
+ }
+
+ sourceBuilder.deserializer(new JsonDebeziumDeserializationSchema());
+ sourceBuilder.debeziumProperties(debeziumProperties);
+
+ if (StringUtils.isNotBlank(slotName)) {
+ sourceBuilder.slotName(slotName);
+ }
+ if (StringUtils.isNotBlank(decodingPluginName)) {
+ sourceBuilder.decodingPluginName(decodingPluginName);
+ }
+ return sourceBuilder.build();
+ }
+}
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/mysql/MysqlCDCBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/mysql/MysqlCDCBuilder.java
index 1014ae4219..d1c21ddca2 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/mysql/MysqlCDCBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/mysql/MysqlCDCBuilder.java
@@ -19,20 +19,19 @@
package org.dinky.cdc.mysql;
+import com.ververica.cdc.connectors.mysql.source.MySqlSource;
+import com.ververica.cdc.connectors.mysql.source.MySqlSourceBuilder;
+import com.ververica.cdc.connectors.mysql.table.StartupOptions;
+import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
+import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.dinky.assertion.Asserts;
import org.dinky.cdc.AbstractCDCBuilder;
import org.dinky.cdc.CDCBuilder;
import org.dinky.constant.FlinkParamConstant;
import org.dinky.data.model.FlinkCDCConfig;
-import org.apache.flink.api.common.eventtime.WatermarkStrategy;
-import org.apache.flink.cdc.connectors.mysql.source.MySqlSource;
-import org.apache.flink.cdc.connectors.mysql.source.MySqlSourceBuilder;
-import org.apache.flink.cdc.connectors.mysql.table.StartupOptions;
-import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema;
-import org.apache.flink.streaming.api.datastream.DataStreamSource;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
import java.time.Duration;
import java.util.List;
import java.util.Map;
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/oracle/OracleCDCBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/oracle/OracleCDCBuilder.java
index a37129fe67..1898df7747 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/oracle/OracleCDCBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/oracle/OracleCDCBuilder.java
@@ -19,18 +19,17 @@
package org.dinky.cdc.oracle;
+import com.ververica.cdc.connectors.base.options.StartupOptions;
+import com.ververica.cdc.connectors.oracle.OracleSource;
+import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.dinky.assertion.Asserts;
import org.dinky.cdc.AbstractCDCBuilder;
import org.dinky.cdc.CDCBuilder;
import org.dinky.constant.FlinkParamConstant;
import org.dinky.data.model.FlinkCDCConfig;
-import org.apache.flink.cdc.connectors.base.options.StartupOptions;
-import org.apache.flink.cdc.connectors.oracle.OracleSource;
-import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema;
-import org.apache.flink.streaming.api.datastream.DataStreamSource;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
import java.util.List;
import java.util.Properties;
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/postgres/PostgresCDCBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/postgres/PostgresCDCBuilder.java
index a70647055d..9221f3b1fc 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/postgres/PostgresCDCBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/postgres/PostgresCDCBuilder.java
@@ -19,25 +19,24 @@
package org.dinky.cdc.postgres;
+import com.ververica.cdc.connectors.postgres.PostgreSQLSource;
+import com.ververica.cdc.debezium.DebeziumSourceFunction;
+import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.dinky.assertion.Asserts;
import org.dinky.cdc.AbstractCDCBuilder;
import org.dinky.cdc.CDCBuilder;
import org.dinky.constant.FlinkParamConstant;
import org.dinky.data.model.FlinkCDCConfig;
-import org.apache.flink.cdc.connectors.postgres.PostgreSQLSource;
-import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema;
-import org.apache.flink.streaming.api.datastream.DataStreamSource;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
import java.util.List;
import java.util.Map;
import java.util.Properties;
public class PostgresCDCBuilder extends AbstractCDCBuilder implements CDCBuilder {
-
public static final String KEY_WORD = "postgres-cdc";
- private static final String METADATA_TYPE = "PostgreSql";
public PostgresCDCBuilder() {}
@@ -51,23 +50,24 @@ public String getHandle() {
}
@Override
- public CDCBuilder create(FlinkCDCConfig config) {
- return new PostgresCDCBuilder(config);
+ protected String getMetadataType() {
+ return "PostgreSql";
}
- @Override
- public DataStreamSource build(StreamExecutionEnvironment env) {
+ public String getSourceName() {
+ return "Postgres CDC Source";
+ }
- String decodingPluginName = config.getSource().get("decoding.plugin.name");
- String slotName = config.getSource().get("slot.name");
+ public String getDriverPrefix() {
+ return "jdbc:postgresql";
+ }
- Properties debeziumProperties = new Properties();
- for (Map.Entry entry : config.getDebezium().entrySet()) {
- if (Asserts.isNotNullString(entry.getKey()) && Asserts.isNotNullString(entry.getValue())) {
- debeziumProperties.setProperty(entry.getKey(), entry.getValue());
- }
- }
+ @Override
+ public CDCBuilder create(FlinkCDCConfig config) {
+ return new PostgresCDCBuilder(config);
+ }
+ public DebeziumSourceFunction<String> createSourceFunction(Properties debeziumProperties, String decodingPluginName, String slotName) {
PostgreSQLSource.Builder sourceBuilder = PostgreSQLSource.builder()
.hostname(config.getHostname())
.port(config.getPort())
@@ -95,12 +95,34 @@ public DataStreamSource build(StreamExecutionEnvironment env) {
if (Asserts.isNotNullString(decodingPluginName)) {
sourceBuilder.decodingPluginName(decodingPluginName);
}
-
if (Asserts.isNotNullString(slotName)) {
sourceBuilder.slotName(slotName);
}
+ return sourceBuilder.build();
+ }
+
+ @Override
+ public DataStreamSource<String> build(StreamExecutionEnvironment env) {
+ Properties debeziumProperties = new Properties();
+ // Defaults for some type conversions
+ debeziumProperties.setProperty("bigint.unsigned.handling.mode", "long");
+ debeziumProperties.setProperty("decimal.handling.mode", "string");
- return env.addSource(sourceBuilder.build(), "Postgres CDC Source");
+ for (Map.Entry<String, String> entry : config.getDebezium().entrySet()) {
+ if (Asserts.isNotNullString(entry.getKey()) && Asserts.isNotNullString(entry.getValue())) {
+ debeziumProperties.setProperty(entry.getKey(), entry.getValue());
+ }
+ }
+
+ String decodingPluginName = config.getSource().get("decoding.plugin.name");
+ String slotName = config.getSource().get("slot.name");
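+ // Fall back to the Debezium property (default "flink") when slot.name is not set on the source; otherwise propagate it to Debezium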
+ if (StringUtils.isBlank(slotName)) {
+ slotName = debeziumProperties.getProperty("slot.name", "flink");
+ } else {
+ debeziumProperties.setProperty("slot.name", slotName);
+ }
+
+ return env.addSource(createSourceFunction(debeziumProperties, decodingPluginName, slotName), getSourceName());
}
@Override
@@ -110,12 +132,6 @@ public String getSchema() {
@Override
public String generateUrl(String schema) {
- String format = "jdbc:postgresql://%s:%s/%s";
- return String.format(format, config.getHostname(), config.getPort(), config.getDatabase());
- }
-
- @Override
- protected String getMetadataType() {
- return METADATA_TYPE;
+ return String.format("%s://%s:%s/%s", getDriverPrefix(), config.getHostname(), config.getPort(), config.getDatabase());
}
}
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/AbstractSqlSinkBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/AbstractSqlSinkBuilder.java
index cbbd03b315..fbb9bd033f 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/AbstractSqlSinkBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/AbstractSqlSinkBuilder.java
@@ -19,6 +19,9 @@
package org.dinky.cdc.sql;
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.flink.configuration.ConfigOption;
+import org.apache.flink.configuration.ConfigOptions;
import org.dinky.assertion.Asserts;
import org.dinky.cdc.AbstractSinkBuilder;
import org.dinky.cdc.convert.DataTypeConverter;
@@ -48,6 +51,7 @@
import org.apache.flink.util.OutputTag;
import java.io.Serializable;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
@@ -55,6 +59,10 @@
import java.util.stream.Collectors;
public abstract class AbstractSqlSinkBuilder extends AbstractSinkBuilder implements Serializable {
+ private static final long serialVersionUID = -4655498892278666404L;
+ protected ZoneId sinkTimeZone = ZoneId.of("UTC");
+ public static final ConfigOption<Boolean> DROP_TABLE_IF_EXISTS =
+ ConfigOptions.key("drop.table.if.exists").booleanType().defaultValue(false);
+ public static final ConfigOption<Boolean> CREATE_TEMPORARY_TABLE =
+ ConfigOptions.key("create.temporary.table").booleanType().defaultValue(false);
protected AbstractSqlSinkBuilder() {}
@@ -70,15 +78,15 @@ private FlatMapFunction sinkRowFunction(
switch (value.get("op").toString()) {
case "r":
case "c":
- rowCollect(columnNameList, columnTypeList, out, RowKind.INSERT, (Map) value.get("after"));
+ rowCollect(columnNameList, columnTypeList, out, RowKind.INSERT, (Map) value.get("after"), schemaTableName);
break;
case "d":
- rowCollect(columnNameList, columnTypeList, out, RowKind.DELETE, (Map) value.get("before"));
+ rowCollect(columnNameList, columnTypeList, out, RowKind.DELETE, (Map) value.get("before"), schemaTableName);
break;
case "u":
rowCollect(
- columnNameList, columnTypeList, out, RowKind.UPDATE_BEFORE, (Map) value.get("before"));
- rowCollect(columnNameList, columnTypeList, out, RowKind.UPDATE_AFTER, (Map) value.get("after"));
+ columnNameList, columnTypeList, out, RowKind.UPDATE_BEFORE, (Map) value.get("before"), schemaTableName);
+ rowCollect(columnNameList, columnTypeList, out, RowKind.UPDATE_AFTER, (Map) value.get("after"), schemaTableName);
break;
default:
}
@@ -87,7 +95,7 @@ private FlatMapFunction sinkRowFunction(
"SchemaTable: {} - Row: {} - Exception {}",
schemaTableName,
JsonUtils.toJsonString(value),
- e.toString());
+ ExceptionUtils.getStackTrace(e));
throw e;
}
};
@@ -99,16 +107,23 @@ private void rowCollect(
List columnTypeList,
Collector out,
RowKind rowKind,
- Map value) {
+ Map value, String schemaTableName) {
if (Asserts.isNull(value)) {
return;
}
Row row = Row.withPositions(rowKind, columnNameList.size());
for (int i = 0; i < columnNameList.size(); i++) {
- row.setField(
- i,
- DataTypeConverter.convertToRow(
- value.get(columnNameList.get(i)), columnTypeList.get(i), sinkTimeZone));
+ LogicalType logicalType = columnTypeList.get(i);
+ String name = columnNameList.get(i);
+ Object val = value.get(name);
+ try {
+ row.setField(i, DataTypeConverter.convertToRow(val, logicalType, sinkTimeZone));
+ } catch (RuntimeException e) {
+ // Log the offending field and value before rethrowing, so conversion failures can be diagnosed
+ logger.error("AbstractSqlSinkBuilder.rowCollect : field -> {}#{} ; javaType -> {} ; flinkType -> {} ; value -> {}",
+ schemaTableName, name,
+ val == null ? null : val.getClass().getName(), logicalType.getTypeRoot().name(), val);
+ throw e;
+ }
}
out.collect(row);
}
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/SQLSinkBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/SQLSinkBuilder.java
index b869276bd7..2f917d6dea 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/SQLSinkBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/SQLSinkBuilder.java
@@ -29,7 +29,19 @@
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.types.Row;
-
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.api.ValidationException;
+import org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl;
+import org.apache.flink.table.catalog.*;
+import org.apache.flink.table.connector.ChangelogMode;
+import org.apache.flink.table.expressions.ApiExpressionUtils;
+import org.apache.flink.table.operations.ExternalQueryOperation;
+import org.apache.flink.table.operations.QueryOperation;
+import org.apache.flink.table.operations.utils.OperationTreeBuilder;
+import org.apache.flink.util.Preconditions;
+
+import javax.annotation.Nullable;
+import java.util.stream.Collectors;
import java.io.Serializable;
import java.util.List;
@@ -48,12 +60,76 @@ private String addSourceTableView(DataStream rowDataDataStream, Table table
// Because view names in Flink may not contain '-', it is replaced with '_' here
String viewName = replaceViewNameMiddleLineToUnderLine("VIEW_" + table.getSchemaTableNameWithUnderline());
- customTableEnvironment.createTemporaryView(
- viewName, customTableEnvironment.fromChangelogStream(rowDataDataStream));
- logger.info("Create {} temporaryView successful...", viewName);
+// customTableEnvironment.createTemporaryView(
+// viewName, customTableEnvironment.fromChangelogStream(rowDataDataStream));
+// logger.info("Create {} temporaryView successful...", viewName);
+ if (!(customTableEnvironment.getTableEnvironment() instanceof StreamTableEnvironmentImpl)) {
+ throw new ValidationException("The DataStream's StreamExecutionEnvironment must be identical to the one that has been passed to the StreamTableEnvironment during instantiation.");
+ }
+ UnresolvedIdentifier identifier = UnresolvedIdentifier.of(
+ StringUtils.isBlank(table.getCatalog()) ? (StringUtils.isBlank(table.getDriverType()) ? "default_catalog" : table.getDriverType()).toLowerCase() : table.getCatalog(),
+ StringUtils.isBlank(table.getSchema()) ? "default_database" : table.getSchema(),
+ table.getName());
+ customTableEnvironment.createTemporaryView(viewName,
+ fromStreamInternal((StreamTableEnvironmentImpl) customTableEnvironment.getTableEnvironment(), rowDataDataStream, null,
+ identifier, ChangelogMode.insertOnly()));
+ logger.info("Created temporary view {} successfully: {}", viewName, identifier.asSummaryString());
return viewName;
}
+ protected org.apache.flink.table.api.Table fromStreamInternal(
+ StreamTableEnvironmentImpl environment,
+ DataStream<Row> dataStream,
+ @Nullable Schema schema,
+ @Nullable UnresolvedIdentifier unresolvedIdentifier,
+ ChangelogMode changelogMode) {
+ Preconditions.checkNotNull(dataStream, "Data stream must not be null.");
+ Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.");
+
+ CatalogManager catalogManager = environment.getCatalogManager();
+ OperationTreeBuilder operationTreeBuilder = environment.getOperationTreeBuilder();
+
+ SchemaTranslator.ConsumingResult schemaTranslationResult =
+ SchemaTranslator.createConsumingResult(
+ catalogManager.getDataTypeFactory(), dataStream.getType(), schema);
+
+ ResolvedCatalogTable resolvedCatalogTable =
+ catalogManager.resolveCatalogTable(
+ new ExternalCatalogTable(schemaTranslationResult.getSchema()));
+
+ ContextResolvedTable contextResolvedTable;
+ if (unresolvedIdentifier != null) {
+ ObjectIdentifier objectIdentifier =
+ catalogManager.qualifyIdentifier(unresolvedIdentifier);
+ contextResolvedTable =
+ ContextResolvedTable.temporary(objectIdentifier, resolvedCatalogTable);
+ } else {
+ contextResolvedTable =
+ ContextResolvedTable.anonymous("datastream_source", resolvedCatalogTable);
+ }
+
+ QueryOperation scanOperation =
+ new ExternalQueryOperation<>(
+ contextResolvedTable,
+ dataStream,
+ schemaTranslationResult.getPhysicalDataType(),
+ schemaTranslationResult.isTopLevelRecord(),
+ changelogMode);
+
+ List<String> projections = schemaTranslationResult.getProjections();
+ if (projections == null) {
+ return environment.createTable(scanOperation);
+ }
+
+ final QueryOperation projectOperation =
+ operationTreeBuilder.project(
+ projections.stream()
+ .map(ApiExpressionUtils::unresolvedRef)
+ .collect(Collectors.toList()),
+ scanOperation);
+
+
+ return environment.createTable(projectOperation);
+ }
+
@Override
protected void addTableSink(DataStream rowDataDataStream, Table table) {
final String viewName = addSourceTableView(rowDataDataStream, table);
@@ -78,9 +154,16 @@ protected void addTableSink(DataStream rowDataDataStream, Table table) {
private List addSinkInsert(
Table table, String viewName, String tableName, String sinkSchemaName, String sinkTableName) {
+ boolean isTemporary = customTableEnvironment.getConfig().get(CREATE_TEMPORARY_TABLE);
+ String flinkDDL;
+ if (customTableEnvironment.getConfig().get(DROP_TABLE_IF_EXISTS)) {
+ flinkDDL = FlinkStatementUtil.getFlinkDropDDL(sinkSchemaName, sinkTableName, isTemporary);
+ logger.info(flinkDDL);
+ customTableEnvironment.executeSql(flinkDDL);
+ }
+
String pkList = StringUtils.join(getPKList(table), ".");
- String flinkDDL =
- FlinkStatementUtil.getFlinkDDL(table, tableName, config, sinkSchemaName, sinkTableName, pkList);
+ flinkDDL = FlinkStatementUtil.getFlinkDDL(table, tableName, config, sinkSchemaName, sinkTableName, pkList, isTemporary);
+
logger.info(flinkDDL);
customTableEnvironment.executeSql(flinkDDL);
logger.info("Create {} FlinkSQL DDL successful...", tableName);
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/catalog/SQLCatalogSinkBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/catalog/SQLCatalogSinkBuilder.java
index bc59a989a4..4018d389c7 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/catalog/SQLCatalogSinkBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sql/catalog/SQLCatalogSinkBuilder.java
@@ -31,6 +31,7 @@
import java.io.Serializable;
public class SQLCatalogSinkBuilder extends AbstractSqlSinkBuilder implements Serializable {
+ private static final long serialVersionUID = 2185746285547955809L;
public static final String KEY_WORD = "sql-catalog";
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sqlserver/SqlServerCDCBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sqlserver/SqlServerCDCBuilder.java
index a0be5a4531..892947daf4 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sqlserver/SqlServerCDCBuilder.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/sqlserver/SqlServerCDCBuilder.java
@@ -19,15 +19,15 @@
package org.dinky.cdc.sqlserver;
+import com.ververica.cdc.connectors.sqlserver.table.StartupOptions;
import org.dinky.assertion.Asserts;
import org.dinky.cdc.AbstractCDCBuilder;
import org.dinky.cdc.CDCBuilder;
import org.dinky.constant.FlinkParamConstant;
import org.dinky.data.model.FlinkCDCConfig;
-import org.apache.flink.cdc.connectors.base.options.StartupOptions;
-import org.apache.flink.cdc.connectors.sqlserver.SqlServerSource;
-import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema;
+import com.ververica.cdc.connectors.sqlserver.SqlServerSource;
+import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/utils/FlinkStatementUtil.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/utils/FlinkStatementUtil.java
index cff6504718..267a467310 100644
--- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/utils/FlinkStatementUtil.java
+++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/utils/FlinkStatementUtil.java
@@ -27,6 +27,7 @@
import org.apache.flink.runtime.util.EnvironmentInformation;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
@@ -63,18 +64,27 @@ public static String getColumnProcessing(Column column, FlinkCDCConfig config) {
}
}
- public static String getFlinkDDL(
- Table table,
- String tableName,
- FlinkCDCConfig config,
- String sinkSchemaName,
- String sinkTableName,
- String pkList) {
+ public static String getTemporaryKey(boolean isTemporary){
+ return isTemporary ? "TEMPORARY" : "";
+ }
+
+ public static String getFlinkDropDDL(String sinkSchemaName, String sinkTableName, boolean isTemporary) {
+ return String.format("DROP %s TABLE IF EXISTS %s;", getTemporaryKey(isTemporary), sinkTableName);
+ }
+
+ public static String getFlinkDDL(Table table, String tableName, FlinkCDCConfig config, String sinkSchemaName, String sinkTableName, String pkList){
+ return getFlinkDDL(table, tableName, config, sinkSchemaName, sinkTableName, pkList, false);
+ }
+
+ public static String getFlinkDDL(Table table, String tableName, FlinkCDCConfig config, String sinkSchemaName,
+ String sinkTableName, String pkList, boolean isTemporary) {
StringBuilder sb = new StringBuilder();
+ sb.append("CREATE ");
+ sb.append(getTemporaryKey(isTemporary));
if (Integer.parseInt(EnvironmentInformation.getVersion().split("\\.")[1]) < 13) {
- sb.append("CREATE TABLE `");
+ sb.append(" TABLE `");
} else {
- sb.append("CREATE TABLE IF NOT EXISTS `");
+ sb.append(" TABLE IF NOT EXISTS `");
}
sb.append(tableName);
sb.append("` (\n");
@@ -150,12 +160,10 @@ private static String convertSinkColumnType(String type, FlinkCDCConfig config)
private static String getSinkConfigurationString(
FlinkCDCConfig config, String sinkSchemaName, String sinkTableName, String pkList) {
- String configurationString =
- SqlUtil.replaceAllParam(config.getSinkConfigurationString(), "schemaName", sinkSchemaName);
- configurationString = SqlUtil.replaceAllParam(configurationString, "tableName", sinkTableName);
- if (configurationString.contains("#{pkList}")) {
- configurationString = SqlUtil.replaceAllParam(configurationString, "pkList", pkList);
- }
- return configurationString;
+ return SqlUtil.replaceAllParam(config.getSinkConfigurationString(), new HashMap<String, String>() {{
+ put("schemaName", sinkSchemaName);
+ put("tableName", sinkTableName);
+ put("pkList", pkList);
+ }});
}
}
diff --git a/dinky-cdc/pom.xml b/dinky-cdc/pom.xml
index 5d1b7a562c..a6872cf006 100644
--- a/dinky-cdc/pom.xml
+++ b/dinky-cdc/pom.xml
@@ -53,5 +53,11 @@
<module>dinky-cdc-plus</module>
+ <profile>
+ <id>flink-1.20</id>
+ <modules>
+ <module>dinky-cdc-plus</module>
+ </modules>
+ </profile>
diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java b/dinky-client/dinky-client-1.16/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
index 0e44aa5a0f..50b14751ff 100644
--- a/dinky-client/dinky-client-1.16/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
+++ b/dinky-client/dinky-client-1.16/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
@@ -1674,6 +1674,7 @@ private void setClusterEntrypointInfoToConfig(final ApplicationReport report) {
final int port = report.getRpcPort();
LOG.info("Found Web Interface {}:{} of application '{}'.", host, port, appId);
+ LOG.info("Found TracKing UI : {}", report.getTrackingUrl());
flinkConfiguration.setString(JobManagerOptions.ADDRESS, host);
flinkConfiguration.setInteger(JobManagerOptions.PORT, port);
diff --git a/dinky-client/dinky-client-1.20/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java b/dinky-client/dinky-client-1.20/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
index 01ff1f40a2..e535bce3b6 100644
--- a/dinky-client/dinky-client-1.20/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
+++ b/dinky-client/dinky-client-1.20/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
@@ -1734,6 +1734,7 @@ private void setClusterEntrypointInfoToConfig(final ApplicationReport report) {
final int port = report.getRpcPort();
LOG.info("Found Web Interface {}:{} of application '{}'.", host, port, appId);
+ LOG.info("Found TracKing UI : {}", report.getTrackingUrl());
flinkConfiguration.set(JobManagerOptions.ADDRESS, host);
flinkConfiguration.set(JobManagerOptions.PORT, port);
diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/flink/checkpoint/source/MysqlCdcSource.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/flink/checkpoint/source/MysqlCdcSource.java
index 7230fc01c5..ccb17269b7 100644
--- a/dinky-client/dinky-client-base/src/main/java/org/dinky/flink/checkpoint/source/MysqlCdcSource.java
+++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/flink/checkpoint/source/MysqlCdcSource.java
@@ -19,21 +19,19 @@
package org.dinky.flink.checkpoint.source;
-import org.dinky.flink.checkpoint.SupportSplitSerializer;
-
-import org.apache.flink.cdc.connectors.mysql.source.split.MySqlBinlogSplit;
-import org.apache.flink.cdc.connectors.mysql.source.split.MySqlSnapshotSplit;
-import org.apache.flink.cdc.connectors.mysql.source.split.MySqlSplit;
-import org.apache.flink.cdc.connectors.mysql.source.split.MySqlSplitSerializer;
-
-import java.util.List;
-import java.util.stream.Collectors;
-
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
+import com.ververica.cdc.connectors.mysql.source.split.MySqlBinlogSplit;
+import com.ververica.cdc.connectors.mysql.source.split.MySqlSnapshotSplit;
+import com.ververica.cdc.connectors.mysql.source.split.MySqlSplit;
+import com.ververica.cdc.connectors.mysql.source.split.MySqlSplitSerializer;
+import org.dinky.flink.checkpoint.SupportSplitSerializer;
+
+import java.util.List;
+import java.util.stream.Collectors;
@SupportSplitSerializer(clazz = MySqlSplitSerializer.class, order = -1)
public class MysqlCdcSource extends BaseCheckpointSource {
diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/utils/FlinkTableMetadataUtil.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/utils/FlinkTableMetadataUtil.java
index 5260c5fdcf..3e1857f24f 100644
--- a/dinky-client/dinky-client-base/src/main/java/org/dinky/utils/FlinkTableMetadataUtil.java
+++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/utils/FlinkTableMetadataUtil.java
@@ -100,7 +100,7 @@ public static List getColumnList(
.keyFlag(isPrimaryKey.get())
.isNullable(logicalType.isNullable())
.position(i)
- .build();
+ .autoIncrement(false).build();
if (logicalType instanceof VarCharType) {
column.setLength(((VarCharType) logicalType).getLength());
} else if (logicalType instanceof TimestampType) {
diff --git a/dinky-common/pom.xml b/dinky-common/pom.xml
index b1aebeaf4b..e06ec05d02 100644
--- a/dinky-common/pom.xml
+++ b/dinky-common/pom.xml
@@ -195,6 +195,10 @@
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
+ <dependency>
+ <groupId>com.fasterxml.jackson.datatype</groupId>
+ <artifactId>jackson-datatype-jdk8</artifactId>
+ </dependency>
diff --git a/dinky-common/src/main/java/org/dinky/data/model/FlinkCDCConfig.java b/dinky-common/src/main/java/org/dinky/data/model/FlinkCDCConfig.java
index 406954b6bb..2e332cfbbd 100644
--- a/dinky-common/src/main/java/org/dinky/data/model/FlinkCDCConfig.java
+++ b/dinky-common/src/main/java/org/dinky/data/model/FlinkCDCConfig.java
@@ -171,7 +171,7 @@ public String getSinkConfigurationString() {
return "'connector' = 'dinky-mock'";
}
return sink.entrySet().stream()
- .filter(t -> !isSkip(t.getKey()))
+ .filter(t -> !(isSkip(t.getKey()) || ("connector".equals(t.getKey()) && "paimon".equalsIgnoreCase(t.getValue()))))
.map(t -> String.format("'%s' = '%s'", t.getKey(), t.getValue()))
.collect(Collectors.joining(",\n"));
}
diff --git a/dinky-common/src/main/java/org/dinky/data/model/Table.java b/dinky-common/src/main/java/org/dinky/data/model/Table.java
index e8717c15c7..040270691b 100644
--- a/dinky-common/src/main/java/org/dinky/data/model/Table.java
+++ b/dinky-common/src/main/java/org/dinky/data/model/Table.java
@@ -109,14 +109,10 @@ public static Table build(String name, String schema, List columns) {
@Transient
public String getFlinkTableWith(String flinkConfig) {
- if (Asserts.isNotNullString(flinkConfig)) {
- Map replacements = new HashMap<>();
- replacements.put("schemaName", schema);
- replacements.put("tableName", name);
-
- return SqlUtil.replaceAllParam(flinkConfig, replacements);
- }
- return "";
+ return SqlUtil.replaceAllParam(flinkConfig, new HashMap<String, String>() {{
+ put("schemaName", schema);
+ put("tableName", name);
+ }});
}
@Transient
diff --git a/dinky-common/src/main/java/org/dinky/data/result/AbstractResult.java b/dinky-common/src/main/java/org/dinky/data/result/AbstractResult.java
index 9a18dbdf02..2112e72c65 100644
--- a/dinky-common/src/main/java/org/dinky/data/result/AbstractResult.java
+++ b/dinky-common/src/main/java/org/dinky/data/result/AbstractResult.java
@@ -21,6 +21,7 @@
import java.time.Duration;
import java.time.LocalDateTime;
+import java.util.List;
/**
* AbstractResult
@@ -28,12 +29,20 @@
* @since 2021/6/29 22:49
*/
public class AbstractResult {
-
protected boolean success;
protected LocalDateTime startTime;
protected LocalDateTime endTime;
protected long time;
protected String error;
+ protected List<org.apache.flink.table.catalog.Column> columnList;
+
+ public List<org.apache.flink.table.catalog.Column> getColumnList() {
+ return columnList;
+ }
+
+ public void setColumnList(List<org.apache.flink.table.catalog.Column> columnList) {
+ this.columnList = columnList;
+ }
public void success() {
this.setEndTime(LocalDateTime.now());
diff --git a/dinky-common/src/main/java/org/dinky/utils/JsonUtils.java b/dinky-common/src/main/java/org/dinky/utils/JsonUtils.java
index b3110927b5..32846d4279 100644
--- a/dinky-common/src/main/java/org/dinky/utils/JsonUtils.java
+++ b/dinky-common/src/main/java/org/dinky/utils/JsonUtils.java
@@ -25,6 +25,7 @@
import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS;
import static java.nio.charset.StandardCharsets.UTF_8;
+import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import org.dinky.assertion.Asserts;
import org.dinky.serializer.LocalDateTimeDeserializer;
import org.dinky.serializer.LocalDateTimeSerializer;
@@ -86,6 +87,7 @@ public class JsonUtils {
.configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true)
.configure(REQUIRE_SETTERS_FOR_GETTERS, true)
.registerModule(javaTimeModule)
+ .registerModule(new Jdk8Module())
.setSerializationInclusion(JsonInclude.Include.NON_NULL)
.setTimeZone(TimeZone.getDefault());
}
diff --git a/dinky-common/src/main/java/org/dinky/utils/SqlUtil.java b/dinky-common/src/main/java/org/dinky/utils/SqlUtil.java
index 300b89227b..0c4fded278 100644
--- a/dinky-common/src/main/java/org/dinky/utils/SqlUtil.java
+++ b/dinky-common/src/main/java/org/dinky/utils/SqlUtil.java
@@ -20,6 +20,11 @@
package org.dinky.utils;
import org.dinky.assertion.Asserts;
+import org.springframework.expression.EvaluationContext;
+import org.springframework.expression.ExpressionParser;
+import org.springframework.expression.common.TemplateParserContext;
+import org.springframework.expression.spel.standard.SpelExpressionParser;
+import org.springframework.expression.spel.support.StandardEvaluationContext;
import java.util.Map;
import java.util.regex.Pattern;
@@ -68,24 +73,35 @@ public static String removeNote(String sql) {
return sql;
}
+ /*
public static String replaceAllParam(String sql, String name, String value) {
return sql.replaceAll("#\\{" + name + "\\}", value);
- }
+ }*/
- /**
- * replace sql context with values params, map's key is origin variable express by `${key}`,
- * value is replacement. for example, if key="name", value="replacement", and sql is "${name}",
- * the result will be "replacement".
- *
- * @param sql sql context
- * @param values replacement
- * @return replace variable result
- */
public static String replaceAllParam(String sql, Map<String, String> values) {
+ if (Asserts.isNullString(sql)) {
+ return "";
+ }
+ EvaluationContext context = new StandardEvaluationContext(values);
for (Map.Entry<String, String> entry : values.entrySet()) {
sql = replaceAllParam(sql, entry.getKey(), entry.getValue());
+ context.setVariable(entry.getKey(), entry.getValue());
}
- return sql;
+ return replaceAllParamSPEL(sql, context);
+ }
+
+ public static String replaceAllParam(String sql, String name, String value) {
+ return sql.replaceAll("\\$\\{" + name + "}", value);
+ }
+
+ private static final TemplateParserContext template = new TemplateParserContext();
+ private static final ExpressionParser parser = new SpelExpressionParser();
+ public static String replaceAllParamSPEL(String expressionString, EvaluationContext context) {
+ return (String) parser.parseExpression(expressionString, template).getValue(context);
+ }
+
+ public static String replaceAllParamSPEL(String expressionString, Object properties) {
+ return (String) parser.parseExpression(expressionString, template).getValue(new StandardEvaluationContext(properties));
}
public static String addLineNumber(String input) {
diff --git a/dinky-core/pom.xml b/dinky-core/pom.xml
index 4da7be2378..b9b528a710 100644
--- a/dinky-core/pom.xml
+++ b/dinky-core/pom.xml
@@ -104,6 +104,21 @@
<artifactId>dinky-metadata-presto</artifactId>
<scope>${scope.runtime}</scope>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-dm</artifactId>
+ <scope>${scope.runtime}</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-kingbase</artifactId>
+ <scope>${scope.runtime}</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-trino</artifactId>
+ <scope>${scope.runtime}</scope>
+ </dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
@@ -194,6 +209,18 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-jexl3</artifactId>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-client-base</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-cdc-core</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-base</artifactId>
+ </dependency>
diff --git a/dinky-core/src/main/java/org/dinky/data/result/DDLResultBuilder.java b/dinky-core/src/main/java/org/dinky/data/result/DDLResultBuilder.java
index 4360647df0..4b28a97dbd 100644
--- a/dinky-core/src/main/java/org/dinky/data/result/DDLResultBuilder.java
+++ b/dinky-core/src/main/java/org/dinky/data/result/DDLResultBuilder.java
@@ -30,6 +30,6 @@ public class DDLResultBuilder implements ResultBuilder {
@Override
public IResult getResult(TableResult tableResult) {
- return new DDLResult(true);
+ return ResultBuilder.setResultColumnList(new DDLResult(true), tableResult);
}
}
diff --git a/dinky-core/src/main/java/org/dinky/data/result/MockResultBuilder.java b/dinky-core/src/main/java/org/dinky/data/result/MockResultBuilder.java
index 9372f46916..edb5346fc1 100644
--- a/dinky-core/src/main/java/org/dinky/data/result/MockResultBuilder.java
+++ b/dinky-core/src/main/java/org/dinky/data/result/MockResultBuilder.java
@@ -44,8 +44,8 @@ public IResult getResult(TableResult tableResult) {
if (tableResult.getJobClient().isPresent()) {
MockResultRunnable runnable = new MockResultRunnable(tableResult, id, maxRowNum, isChangeLog, isAutoCancel);
threadPoolExecutor.execute(runnable);
- return SelectResult.buildSuccess(
- tableResult.getJobClient().get().getJobID().toHexString());
+ return ResultBuilder.setResultColumnList(SelectResult.buildSuccess(
+ tableResult.getJobClient().get().getJobID().toHexString()), tableResult);
} else {
return SelectResult.buildFailed();
}
@@ -62,7 +62,7 @@ public IResult getResultWithPersistence(TableResult tableResult, JobHandler jobH
jobHandler.persistResultData(com.google.common.collect.Lists.newArrayList(s));
});
threadPoolExecutor.execute(runnable);
- return SelectResult.buildMockedResult(id);
+ return ResultBuilder.setResultColumnList(SelectResult.buildMockedResult(id), tableResult);
} else {
return SelectResult.buildFailed();
}
diff --git a/dinky-core/src/main/java/org/dinky/data/result/MockResultRunnable.java b/dinky-core/src/main/java/org/dinky/data/result/MockResultRunnable.java
index 28dc9ddda0..ad1c2460bf 100644
--- a/dinky-core/src/main/java/org/dinky/data/result/MockResultRunnable.java
+++ b/dinky-core/src/main/java/org/dinky/data/result/MockResultRunnable.java
@@ -83,7 +83,7 @@ public void run() {
try {
tableResult.getJobClient().ifPresent(jobClient -> {
if (!ResultPool.containsKey(id)) {
- ResultPool.put(SelectResult.buildMockedResult(id));
+ ResultPool.put(ResultBuilder.setResultColumnList(SelectResult.buildMockedResult(id), tableResult));
}
try {
if (isChangeLog) {
diff --git a/dinky-core/src/main/java/org/dinky/data/result/ResultBuilder.java b/dinky-core/src/main/java/org/dinky/data/result/ResultBuilder.java
index be2fbae047..2bdbac7b30 100644
--- a/dinky-core/src/main/java/org/dinky/data/result/ResultBuilder.java
+++ b/dinky-core/src/main/java/org/dinky/data/result/ResultBuilder.java
@@ -19,6 +19,7 @@
package org.dinky.data.result;
+import org.apache.flink.table.catalog.ResolvedSchema;
import org.dinky.data.job.SqlType;
import org.dinky.job.JobHandler;
@@ -31,24 +32,11 @@
*/
public interface ResultBuilder {
- static ResultBuilder build(
- SqlType operationType,
- String id,
- Integer maxRowNum,
- boolean isChangeLog,
- boolean isAutoCancel,
- String timeZone) {
+ static ResultBuilder build(SqlType operationType, String id, Integer maxRowNum, boolean isChangeLog, boolean isAutoCancel, String timeZone) {
return build(operationType, id, maxRowNum, isChangeLog, isAutoCancel, timeZone, false);
}
- static ResultBuilder build(
- SqlType operationType,
- String id,
- Integer maxRowNum,
- boolean isChangeLog,
- boolean isAutoCancel,
- String timeZone,
- boolean isMockSinkFunction) {
+ static ResultBuilder build(SqlType operationType, String id, Integer maxRowNum, boolean isChangeLog, boolean isAutoCancel, String timeZone, boolean isMockSinkFunction) {
switch (operationType) {
case SELECT:
case WITH:
@@ -59,9 +47,7 @@ static ResultBuilder build(
return new ShowResultBuilder(id);
case INSERT:
case EXECUTE:
- return isMockSinkFunction
- ? new MockResultBuilder(id, maxRowNum, isChangeLog, isAutoCancel)
- : new InsertResultBuilder();
+ return isMockSinkFunction ? new MockResultBuilder(id, maxRowNum, isChangeLog, isAutoCancel) : new InsertResultBuilder();
default:
return new DDLResultBuilder();
}
@@ -79,4 +65,16 @@ static ResultBuilder build(
default IResult getResultWithPersistence(TableResult tableResult, JobHandler jobHandler) {
return getResult(tableResult);
}
+
+ static <T extends AbstractResult> T setResultColumnList(T result, TableResult tableResult) {
+ ResolvedSchema resolvedSchema;
+ if (result.getColumnList() == null && (resolvedSchema = tableResult.getResolvedSchema()) != null) {
+ try {
+ result.setColumnList(resolvedSchema.getColumns());
+ } catch (RuntimeException e) {
+ e.printStackTrace();
+ }
+ }
+ return result;
+ }
}
diff --git a/dinky-core/src/main/java/org/dinky/data/result/ResultRunnable.java b/dinky-core/src/main/java/org/dinky/data/result/ResultRunnable.java
index 84704624dc..b054db9ce5 100644
--- a/dinky-core/src/main/java/org/dinky/data/result/ResultRunnable.java
+++ b/dinky-core/src/main/java/org/dinky/data/result/ResultRunnable.java
@@ -84,7 +84,7 @@ public void run() {
try {
tableResult.getJobClient().ifPresent(jobClient -> {
if (!ResultPool.containsKey(id)) {
- ResultPool.put(new SelectResult(id, new ArrayList<>(), new LinkedHashSet<>()));
+ ResultPool.put(ResultBuilder.setResultColumnList(new SelectResult(id, new ArrayList<>(), new LinkedHashSet<>()), tableResult));
}
try {
if (isChangeLog) {
diff --git a/dinky-core/src/main/java/org/dinky/data/result/SelectResultBuilder.java b/dinky-core/src/main/java/org/dinky/data/result/SelectResultBuilder.java
index 40c5e424b5..318663d0b1 100644
--- a/dinky-core/src/main/java/org/dinky/data/result/SelectResultBuilder.java
+++ b/dinky-core/src/main/java/org/dinky/data/result/SelectResultBuilder.java
@@ -56,7 +56,7 @@ public IResult getResult(TableResult tableResult) {
ResultRunnable runnable =
new ResultRunnable(tableResult, id, maxRowNum, isChangeLog, isAutoCancel, timeZone);
threadPoolExecutor.execute(runnable);
- return SelectResult.buildSuccess(jobId);
+ return ResultBuilder.setResultColumnList(SelectResult.buildSuccess(jobId), tableResult);
} else {
return SelectResult.buildFailed();
}
@@ -82,7 +82,7 @@ public IResult getResultWithPersistence(TableResult tableResult, JobHandler jobH
jobHandler.persistResultData(Lists.newArrayList(s));
});
threadPoolExecutor.execute(runnable);
- return SelectResult.buildSuccess(jobId);
+ return ResultBuilder.setResultColumnList(SelectResult.buildSuccess(jobId), tableResult);
} else {
return SelectResult.buildFailed();
}
diff --git a/dinky-core/src/main/java/org/dinky/data/result/ShowResultBuilder.java b/dinky-core/src/main/java/org/dinky/data/result/ShowResultBuilder.java
index b76a466c29..154fae9212 100644
--- a/dinky-core/src/main/java/org/dinky/data/result/ShowResultBuilder.java
+++ b/dinky-core/src/main/java/org/dinky/data/result/ShowResultBuilder.java
@@ -69,7 +69,7 @@ public IResult getResult(TableResult tableResult) {
}
rows.add(map);
}
- return new org.dinky.data.result.DDLResult(rows, rows.size(), column);
+ return ResultBuilder.setResultColumnList(new DDLResult(rows, rows.size(), column), tableResult);
}
/**
@@ -90,7 +90,7 @@ public IResult getResultWithPersistence(TableResult tableResult, JobHandler jobH
new SelectResult(id, ddlResult.getRowData(), Sets.newLinkedHashSet(ddlResult.getColumns()));
selectResult.setDestroyed(Boolean.TRUE);
try {
- ResultPool.put(selectResult);
+ ResultPool.put(ResultBuilder.setResultColumnList(selectResult, tableResult));
jobHandler.persistResultData(Lists.newArrayList(this.id));
} finally {
ResultPool.remove(id);
diff --git a/dinky-core/src/main/java/org/dinky/parser/CreateCDCSourceSqlParser.java b/dinky-core/src/main/java/org/dinky/parser/CreateCDCSourceSqlParser.java
index 1dde85644f..a57680173b 100644
--- a/dinky-core/src/main/java/org/dinky/parser/CreateCDCSourceSqlParser.java
+++ b/dinky-core/src/main/java/org/dinky/parser/CreateCDCSourceSqlParser.java
@@ -33,6 +33,6 @@ public CreateCDCSourceSqlParser(String originalSql) {
@Override
protected void initializeSegments() {
segments.add(new SqlSegment("CDCSOURCE", "(execute\\s+cdcsource\\s+)(.+)(\\s+with\\s+\\()", "[,]"));
- segments.add(new SqlSegment("WITH", "(with\\s+\\()(.+)(\\))", "',"));
+ segments.add(new SqlSegment("WITH", "(with\\s+\\()(.+)(\\))", "'\\s*,"));
}
}
diff --git a/dinky-core/src/main/java/org/dinky/trans/ddl/CreateCDCSourceOperation.java b/dinky-core/src/main/java/org/dinky/trans/ddl/CreateCDCSourceOperation.java
index 4b49b7c9ef..9bc2674190 100644
--- a/dinky-core/src/main/java/org/dinky/trans/ddl/CreateCDCSourceOperation.java
+++ b/dinky-core/src/main/java/org/dinky/trans/ddl/CreateCDCSourceOperation.java
@@ -181,6 +181,10 @@ public TableResult execute(Executor executor) {
}
}
+ if (schemaTableNameList.isEmpty()) {
+ logger.error("Could not find any related tables. Regexp => SchemaList: {} , TableList: {}", schemaNameList, tableRegList);
+ throw new Exception("Could not find any related tables.");
+ }
logger.info("A total of {} tables were detected...", schemaTableNameList.size());
for (int i = 0; i < schemaTableNameList.size(); i++) {
logger.info("{}: {}", i + 1, schemaTableNameList.get(i));
diff --git a/dinky-core/src/test/java/org/dinky/job/JobManagerTest.java b/dinky-core/src/test/java/org/dinky/job/JobManagerTest.java
index ace3b28d94..9e2ecc4c63 100644
--- a/dinky-core/src/test/java/org/dinky/job/JobManagerTest.java
+++ b/dinky-core/src/test/java/org/dinky/job/JobManagerTest.java
@@ -21,6 +21,7 @@
import static org.junit.jupiter.api.Assertions.*;
+import com.google.common.io.Resources;
import org.dinky.data.enums.GatewayType;
import org.dinky.data.result.ExplainResult;
import org.dinky.executor.ExecutorConfig;
@@ -28,7 +29,6 @@
import org.dinky.explainer.lineage.LineageResult;
import org.apache.commons.io.IOUtils;
-import org.apache.flink.shaded.guava31.com.google.common.io.Resources;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
diff --git a/dinky-flink/dinky-flink-1.16/pom.xml b/dinky-flink/dinky-flink-1.16/pom.xml
index 757b36687f..47b9695f3f 100644
--- a/dinky-flink/dinky-flink-1.16/pom.xml
+++ b/dinky-flink/dinky-flink-1.16/pom.xml
@@ -49,7 +49,7 @@
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc</artifactId>
- <version>${flink.version}</version>
+ <version>3.1-SNAPSHOT-${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
diff --git a/dinky-flink/dinky-flink-1.20/pom.xml b/dinky-flink/dinky-flink-1.20/pom.xml
index 78710cecc4..fce821ba70 100644
--- a/dinky-flink/dinky-flink-1.20/pom.xml
+++ b/dinky-flink/dinky-flink-1.20/pom.xml
@@ -20,6 +20,8 @@
19.0
1.20.0
3.2.0
+ <flinkcdc2.version>2.3-SNAPSHOT-${flink.version}</flinkcdc2.version>
+ <flinkjdbc.version>3.1-SNAPSHOT-${flink.version}</flinkjdbc.version>
@@ -61,6 +63,18 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
+ <exclusion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-reflect</artifactId>
+ </exclusion>
@@ -76,7 +90,17 @@
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc</artifactId>
- <version>3.2.0-1.19</version>
+ <version>${flinkjdbc.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.flink</groupId>
+ <artifactId>flink-connector-jdbc-dm</artifactId>
+ <version>${flinkjdbc.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.flink</groupId>
+ <artifactId>flink-connector-jdbc-kingbase</artifactId>
+ <version>${flinkjdbc.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
@@ -87,6 +111,20 @@
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-scala-bridge_2.12</artifactId>
<version>${flink.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-reflect</artifactId>
+ </exclusion>
+ </exclusions>
org.apache.flink
@@ -112,7 +150,7 @@
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka</artifactId>
- <version>3.2.0-1.19</version>
+ <version>3.4-SNAPSHOT</version>
org.apache.doris
@@ -125,35 +163,57 @@
<version>32.1.3-jre-${flink.shaded.version}</version>
- <groupId>org.apache.flink</groupId>
+ <groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-mysql-cdc</artifactId>
- <version>${flinkcdc.version}</version>
+ <version>${flinkcdc2.version}</version>
+ <type>jar</type>
- <groupId>org.apache.flink</groupId>
+ <groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-oracle-cdc</artifactId>
- <version>${flinkcdc.version}</version>
+ <version>${flinkcdc2.version}</version>
+ <type>jar</type>
- <groupId>org.apache.flink</groupId>
+ <groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-sqlserver-cdc</artifactId>
- <version>${flinkcdc.version}</version>
+ <version>${flinkcdc2.version}</version>
+ <type>jar</type>
- <groupId>org.apache.flink</groupId>
+ <groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-postgres-cdc</artifactId>
- <version>${flinkcdc.version}</version>
+ <version>${flinkcdc2.version}</version>
+ <type>jar</type>
+ </dependency>
+ <dependency>
+ <groupId>com.ververica</groupId>
+ <artifactId>flink-sql-connector-kingbase-cdc</artifactId>
+ <version>${flinkcdc2.version}</version>
+ <type>jar</type>
+ </dependency>
+ <dependency>
+ <groupId>com.ververica</groupId>
+ <artifactId>flink-sql-connector-dm-cdc</artifactId>
+ <version>${flinkcdc2.version}</version>
+ <type>jar</type>
<groupId>org.apache.flink</groupId>
<artifactId>flink-cdc-cli</artifactId>
<version>${flinkcdc.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.flink</groupId>
+ <artifactId>flink-kubernetes</artifactId>
+ </exclusion>
+ </exclusions>
-
+
<groupId>org.apache.flink</groupId>
<artifactId>flink-cdc-pipeline-connector-doris</artifactId>
diff --git a/dinky-function/src/main/java/org/dinky/function/FunctionFactory.java b/dinky-function/src/main/java/org/dinky/function/FunctionFactory.java
index ed48f4d9db..b1bc66144f 100644
--- a/dinky-function/src/main/java/org/dinky/function/FunctionFactory.java
+++ b/dinky-function/src/main/java/org/dinky/function/FunctionFactory.java
@@ -19,12 +19,14 @@
package org.dinky.function;
+import cn.hutool.core.util.StrUtil;
import org.dinky.function.compiler.FunctionCompiler;
import org.dinky.function.compiler.FunctionPackage;
import org.dinky.function.data.model.UDF;
import org.dinky.function.data.model.UDFPath;
import org.apache.flink.configuration.Configuration;
+import org.dinky.function.exception.UDFCompilerException;
import java.util.List;
@@ -42,8 +44,10 @@ public static UDFPath initUDF(List udfClassList, Integer taskId) {
public static void initUDF(UDF udf, Integer taskId) {
// compile
- FunctionCompiler.getCompiler(udf, new Configuration(), taskId);
-
+ if (!FunctionCompiler.getCompiler(udf, new Configuration(), taskId)) {
+ throw new UDFCompilerException(StrUtil.format(
+ "codeLanguage:{} , className:{} compilation failed", udf.getFunctionLanguage(), udf.getClassName()));
+ }
// package
FunctionPackage.bale(udf, taskId);
}
diff --git a/dinky-function/src/main/java/org/dinky/function/compiler/CompilerOptions.java b/dinky-function/src/main/java/org/dinky/function/compiler/CompilerOptions.java
new file mode 100644
index 0000000000..63d7ac076c
--- /dev/null
+++ b/dinky-function/src/main/java/org/dinky/function/compiler/CompilerOptions.java
@@ -0,0 +1,46 @@
+package org.dinky.function.compiler;
+
+import sun.misc.SharedSecrets;
+import sun.misc.URLClassPath;
+
+import java.io.File;
+import java.lang.reflect.Field;
+import java.net.URLClassLoader;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Author: lwjhn
+ * Date: 2024/6/20 15:29
+ * Description:
+ */
+public class CompilerOptions {
+ private static final Iterable<String> options;
+
+ private static final String classpath;
+
+ static {
+ try {
+ Field field = URLClassPath.class.getDeclaredField("lmap");
+ field.setAccessible(true);
+
+ URLClassPath urlClassPath = SharedSecrets.getJavaNetAccess().getURLClassPath((URLClassLoader) CompilerOptions.class.getClassLoader());
+
+ options = Collections.unmodifiableList(Arrays.asList("-encoding", "UTF-8", "-classpath",
+ classpath = String.join(File.pathSeparator, ((Map<String, ?>) field.get(urlClassPath)).keySet()
+ .stream().map(o -> o.replaceAll("^file://", "")).collect(Collectors.toSet()))));
+ } catch (NoSuchFieldException | IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public static Iterable<String> getOptions() {
+ return options;
+ }
+
+ public static String getClasspath() {
+ return classpath;
+ }
+}
diff --git a/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringJavaCompiler.java b/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringJavaCompiler.java
index 0d4d084c4f..083224f9f0 100644
--- a/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringJavaCompiler.java
+++ b/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringJavaCompiler.java
@@ -24,6 +24,7 @@
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
+import java.net.URLClassLoader;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -85,25 +86,21 @@ public CustomStringJavaCompiler(String sourceCode) {
*/
public boolean compilerToTmpPath(String tmpPath) {
long startTime = System.currentTimeMillis();
- File codeFile =
- FileUtil.writeUtf8String(sourceCode, tmpPath + StrUtil.replace(fullClassName, ".", "/") + ".java");
+ File codeFile = FileUtil.writeUtf8String(sourceCode, tmpPath + StrUtil.replace(fullClassName, ".", "/") + ".java");
// Standard file manager; replace it with our own implementation and override selected methods
- StandardJavaFileManager standardFileManager = compiler.getStandardFileManager(diagnosticsCollector, null, null);
- try {
- standardFileManager.setLocation(
- StandardLocation.CLASS_OUTPUT, Collections.singletonList(new File(tmpPath)));
+ try (StandardJavaFileManager standardFileManager = compiler.getStandardFileManager(diagnosticsCollector, null, null)) {
+ // standardFileManager.setLocation(StandardLocation.CLASS_OUTPUT, Collections.singletonList(new File(tmpPath)));
+ Iterable<? extends JavaFileObject> javaFileObject =
+ standardFileManager.getJavaFileObjectsFromFiles(Collections.singletonList(codeFile));
+ Iterable<String> options = CompilerOptions.getOptions();
+ JavaCompiler.CompilationTask task = compiler.getTask(null, standardFileManager, diagnosticsCollector, options, null,
+ javaFileObject);
+ // record the compilation time
+ compilerTakeTime = System.currentTimeMillis() - startTime;
+ return task.call();
} catch (IOException e) {
throw new RuntimeException(e);
}
- Iterable<? extends JavaFileObject> javaFileObject =
- standardFileManager.getJavaFileObjectsFromFiles(Collections.singletonList(codeFile));
-
- // obtain a compilation task
- JavaCompiler.CompilationTask task =
- compiler.getTask(null, standardFileManager, diagnosticsCollector, null, null, javaFileObject);
- // record the compilation time
- compilerTakeTime = System.currentTimeMillis() - startTime;
- return task.call();
}
/** @return compilation messages (errors and warnings) */
diff --git a/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringScalaCompiler.java b/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringScalaCompiler.java
index 37f032ea42..564a29d18b 100644
--- a/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringScalaCompiler.java
+++ b/dinky-function/src/main/java/org/dinky/function/compiler/CustomStringScalaCompiler.java
@@ -46,7 +46,8 @@ public static IMain getInterpreter() {
GenericRunnerSettings settings = new GenericRunnerSettings(new ErrorHandler());
- settings.usejavacp().tryToSetFromPropertyValue("true");
+ settings.usejavacp().tryToSetFromPropertyValue("false");
+ settings.classpath().append(CompilerOptions.getClasspath());
settings.Yreploutdir().tryToSetFromPropertyValue(PathConstant.getUdfCompilerPath(FunctionLanguage.JAVA));
return new IMain(settings);
}
diff --git a/dinky-function/src/main/java/org/dinky/function/compiler/FunctionCompiler.java b/dinky-function/src/main/java/org/dinky/function/compiler/FunctionCompiler.java
index 4661b8d875..9b77701333 100644
--- a/dinky-function/src/main/java/org/dinky/function/compiler/FunctionCompiler.java
+++ b/dinky-function/src/main/java/org/dinky/function/compiler/FunctionCompiler.java
@@ -19,29 +19,25 @@
package org.dinky.function.compiler;
-import org.dinky.function.data.model.UDF;
-import org.dinky.function.exception.UDFCompilerException;
-
+import cn.hutool.core.lang.Singleton;
+import cn.hutool.core.util.StrUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;
-
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import org.dinky.assertion.Asserts;
+import org.dinky.function.data.model.UDF;
+import org.dinky.function.exception.UDFCompilerException;
+import org.dinky.function.pool.UdfCodePool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import cn.hutool.core.lang.Singleton;
-import cn.hutool.core.util.StrUtil;
+import java.util.List;
+import java.util.Map;
/** @since 0.6.8 */
public interface FunctionCompiler {
Logger log = LoggerFactory.getLogger(FunctionCompiler.class);
- Set COMPILER_CACHE = new HashSet<>();
-
+ // Set COMPILER_CACHE = new HashSet<>();
/**
* 函数代码在线动态编译
*
@@ -64,10 +60,10 @@ static boolean getCompilerByTask(UDF udf, Map conf, Integer task
* @return 编译状态
*/
static boolean getCompiler(UDF udf, ReadableConfig conf, Integer taskId) {
- log.info("Compiled UDF: {},; Language: {}", udf.getClassName(), udf.getFunctionLanguage());
-
- String key = udf.getClassName() + udf.getFunctionLanguage();
- if (COMPILER_CACHE.contains(key)) {
+ Asserts.checkNull(udf, "udf为空");
+ Asserts.checkNull(udf.getCode(), "udf 代码为空");
+ UDF cache = UdfCodePool.getUDF(udf.getClassName());
+ if (cache!=null && udf.getCode().equals(cache.getCode())) {
return true;
}
boolean success;
@@ -86,7 +82,7 @@ static boolean getCompiler(UDF udf, ReadableConfig conf, Integer taskId) {
udf.getFunctionLanguage().name());
}
if (success) {
- COMPILER_CACHE.add(key);
+ UdfCodePool.addOrUpdate(udf); // COMPILER_CACHE.add(key);
}
return success;
}
diff --git a/dinky-function/src/main/java/org/dinky/function/compiler/JavaCompiler.java b/dinky-function/src/main/java/org/dinky/function/compiler/JavaCompiler.java
index e66d78640c..c7791ce7e1 100644
--- a/dinky-function/src/main/java/org/dinky/function/compiler/JavaCompiler.java
+++ b/dinky-function/src/main/java/org/dinky/function/compiler/JavaCompiler.java
@@ -19,13 +19,11 @@
package org.dinky.function.compiler;
-import org.dinky.function.constant.PathConstant;
-import org.dinky.function.data.model.UDF;
-
+import lombok.extern.slf4j.Slf4j;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.table.catalog.FunctionLanguage;
-
-import lombok.extern.slf4j.Slf4j;
+import org.dinky.function.constant.PathConstant;
+import org.dinky.function.data.model.UDF;
/**
* java 编译
@@ -56,7 +54,8 @@ public synchronized boolean compiler(UDF udf, ReadableConfig conf, Integer taskI
} else {
log.error("class compilation failed:{}", className);
log.error(compiler.getCompilerMessage());
- return false;
+ throw new RuntimeException(compiler.getCompilerMessage());
+ // return false;
}
}
}
diff --git a/dinky-function/src/main/java/org/dinky/function/compiler/PythonFunction.java b/dinky-function/src/main/java/org/dinky/function/compiler/PythonFunction.java
index dbc0616f44..7e622ec883 100644
--- a/dinky-function/src/main/java/org/dinky/function/compiler/PythonFunction.java
+++ b/dinky-function/src/main/java/org/dinky/function/compiler/PythonFunction.java
@@ -91,7 +91,8 @@ public boolean compiler(UDF udf, ReadableConfig conf, Integer taskId) {
"Python udf compilation failed; className:{}\n.reason: {}",
udf.getClassName(),
ExceptionUtil.getRootCauseMessage(e));
- return false;
+ throw new RuntimeException(ExceptionUtil.getRootCauseMessage(e));
+ // return false;
}
FileUtil.del(zipFile);
return true;
diff --git a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java
index a41e8d1fef..dabefd0899 100644
--- a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java
+++ b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java
@@ -19,11 +19,13 @@
package org.dinky.function.util;
+import cn.hutool.extra.spring.SpringUtil;
import org.dinky.assertion.Asserts;
import org.dinky.classloader.DinkyClassLoader;
import org.dinky.config.Dialect;
import org.dinky.context.FlinkUdfPathContextHolder;
import org.dinky.data.enums.GatewayType;
+import org.dinky.data.enums.JobLifeCycle;
import org.dinky.data.exception.DinkyException;
import org.dinky.data.model.FlinkUdfManifest;
import org.dinky.data.model.SystemConfiguration;
@@ -48,6 +50,7 @@
import java.io.File;
import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
@@ -335,6 +338,16 @@ public static UDF toUDF(String statement, DinkyClassLoader classLoader) {
}
UDF udf = UdfCodePool.getUDF(className);
+ if (udf == null) {
+ // add or update the udf pool: when a task is published, the UDF may not be in memory, so look up
+ // the related published UDF class and compile it; only report an error if that lookup also fails
+ Object o = SpringUtil.getBean("taskServiceImpl");
+ try {
+ udf = (UDF) o.getClass().getDeclaredMethod("addOrUpdateUdfCodePool", String.class)
+ .invoke(o, className);
+ } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
if (udf != null) {
return UDF.builder()
.name(udfName)
@@ -357,6 +370,8 @@ public static UDF toUDF(String statement, DinkyClassLoader classLoader) {
return null;
}
+
+
// create FlinkUdfPathContextHolder from UdfCodePool
public static FlinkUdfPathContextHolder createFlinkUdfPathContextHolder() {
FlinkUdfPathContextHolder udfPathContextHolder = new FlinkUdfPathContextHolder();
@@ -456,8 +471,12 @@ public static void writeManifest(
.map(URLUtil::getURL)
.collect(Collectors.toList()));
- FileUtil.writeUtf8String(
+ File file = FileUtil.writeUtf8String(
JsonUtils.toJsonString(flinkUdfManifest),
PathConstant.getUdfPackagePath(taskId) + PathConstant.DEP_MANIFEST);
+
+ // make the manifest readable and writable by everyone
+ file.setReadable(true, false);
+ file.setWritable(true, false);
}
}
diff --git a/dinky-metadata/dinky-metadata-dm/pom.xml b/dinky-metadata/dinky-metadata-dm/pom.xml
new file mode 100644
index 0000000000..4ba4e5c6fa
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-dm/pom.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata</artifactId>
+ <version>${revision}</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>dinky-metadata-dm</artifactId>
+ <properties>
+ <maven.compiler.source>${target.java.version}</maven.compiler.source>
+ <maven.compiler.target>${target.java.version}</maven.compiler.target>
+ </properties>
+ <packaging>jar</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-base</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>dm.jdbc</groupId>
+ <artifactId>Dm7JdbcDriver17</artifactId>
+ <version>7.1.6</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/convert/DmTypeConvert.java b/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/convert/DmTypeConvert.java
new file mode 100644
index 0000000000..5331056e49
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/convert/DmTypeConvert.java
@@ -0,0 +1,192 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.convert;
+
+import org.dinky.assertion.Asserts;
+import org.dinky.data.enums.ColumnType;
+import org.dinky.data.model.Column;
+import org.dinky.metadata.config.AbstractJdbcConfig;
+import org.dinky.metadata.config.DriverConfig;
+
+/**
+ * DmTypeConvert
+ *
+ * @author wenmo
+ * @since 2021/7/20 15:21
+ **/
+public class DmTypeConvert implements ITypeConvert {
+
+ @Override
+ public ColumnType convert(Column column) {
+ ColumnType columnType = ColumnType.STRING;
+ if (Asserts.isNull(column)) {
+ return columnType;
+ }
+ Integer length = Asserts.isNull(column.getLength()) ? 0 : column.getLength();
+ String t = Asserts.isNull(column.getType()) ? "" : column.getType().toLowerCase();
+ boolean isNullable = !column.isKeyFlag() && column.isNullable();
+ if (t.contains("numeric") || t.contains("decimal")) {
+ columnType = ColumnType.DECIMAL;
+ } else if (t.contains("bigint")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_LONG;
+ } else {
+ columnType = ColumnType.LONG;
+ }
+ } else if (t.contains("float")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_FLOAT;
+ } else {
+ columnType = ColumnType.FLOAT;
+ }
+ } else if (t.contains("double")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_DOUBLE;
+ } else {
+ columnType = ColumnType.DOUBLE;
+ }
+ } else if (t.contains("boolean") || (t.contains("tinyint") && length.equals(1)) || t.contains("bit")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_BOOLEAN;
+ } else {
+ columnType = ColumnType.BOOLEAN;
+ }
+ } else if (t.contains("datetime")) {
+ columnType = ColumnType.TIMESTAMP;
+ } else if (t.contains("date")) {
+ columnType = ColumnType.DATE;
+ } else if (t.contains("timestamp")) {
+ columnType = ColumnType.TIMESTAMP;
+ } else if (t.contains("time")) {
+ columnType = ColumnType.TIME;
+ } else if (t.contains("char") || t.contains("text")) {
+ columnType = ColumnType.STRING;
+ } else if (t.contains("binary") || t.contains("blob")) {
+ columnType = ColumnType.BYTES;
+ } else if (t.contains("tinyint") || t.contains("mediumint") || t.contains("smallint") || t.contains("int")) {
+ if (isNullable) {
+ columnType = ColumnType.INTEGER;
+ } else {
+ columnType = ColumnType.INT;
+ }
+ }
+ return columnType;
+ }
+
+ @Override
+ public ColumnType convert(Column column, DriverConfig driverConfig) {
+ ColumnType columnType = ColumnType.STRING;
+ if (Asserts.isNull(column)) {
+ return columnType;
+ }
+ Integer length = Asserts.isNull(column.getLength()) ? 0 : column.getLength();
+ String t = Asserts.isNull(column.getType()) ? "" : column.getType().toLowerCase();
+ boolean isNullable = !column.isKeyFlag() && column.isNullable();
+ boolean tinyInt1isBit =
+ Asserts.isNotNullString(driverConfig.getConnectConfig().getUrl())
+ && !driverConfig.getConnectConfig().getUrl().contains("tinyInt1isBit=false");
+ if (t.contains("numeric") || t.contains("decimal")) {
+ columnType = ColumnType.DECIMAL;
+ } else if (t.contains("bigint")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_LONG;
+ } else {
+ columnType = ColumnType.LONG;
+ }
+ } else if (t.contains("float")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_FLOAT;
+ } else {
+ columnType = ColumnType.FLOAT;
+ }
+ } else if (t.contains("double")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_DOUBLE;
+ } else {
+ columnType = ColumnType.DOUBLE;
+ }
+ } else if (t.contains("boolean") || (tinyInt1isBit && t.contains("tinyint") && length.equals(1))
+ || t.contains("bit")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_BOOLEAN;
+ } else {
+ columnType = ColumnType.BOOLEAN;
+ }
+ } else if (t.contains("datetime")) {
+ columnType = ColumnType.TIMESTAMP;
+ } else if (t.contains("date")) {
+ columnType = ColumnType.DATE;
+ } else if (t.contains("timestamp")) {
+ columnType = ColumnType.TIMESTAMP;
+ } else if (t.contains("time")) {
+ columnType = ColumnType.TIME;
+ } else if (t.contains("char") || t.contains("text")) {
+ columnType = ColumnType.STRING;
+ } else if (t.contains("binary") || t.contains("blob")) {
+ columnType = ColumnType.BYTES;
+ } else if (t.contains("tinyint") || t.contains("mediumint") || t.contains("smallint") || t.contains("int")) {
+ if (isNullable) {
+ columnType = ColumnType.INTEGER;
+ } else {
+ columnType = ColumnType.INT;
+ }
+ }
+ return columnType;
+ }
+
+ @Override
+ public String convertToDB(ColumnType columnType) {
+ switch (columnType) {
+ case STRING:
+ return "varchar";
+ case BYTE:
+ return "tinyint";
+ case SHORT:
+ return "smallint";
+ case DECIMAL:
+ return "decimal";
+ case LONG:
+ case JAVA_LANG_LONG:
+ return "bigint";
+ case FLOAT:
+ case JAVA_LANG_FLOAT:
+ return "float";
+ case DOUBLE:
+ case JAVA_LANG_DOUBLE:
+ return "double";
+ case BOOLEAN:
+ case JAVA_LANG_BOOLEAN:
+ return "boolean";
+ case TIMESTAMP:
+ return "datetime";
+ case DATE:
+ return "date";
+ case TIME:
+ return "time";
+ case BYTES:
+ return "binary";
+ case INTEGER:
+ case INT:
+ return "int";
+ default:
+ return "varchar";
+ }
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/driver/DmDriver.java b/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/driver/DmDriver.java
new file mode 100644
index 0000000000..aa88c83e1b
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/driver/DmDriver.java
@@ -0,0 +1,239 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.driver;
+
+import lombok.extern.slf4j.Slf4j;
+import org.dinky.assertion.Asserts;
+import org.dinky.data.model.Column;
+import org.dinky.data.model.QueryData;
+import org.dinky.data.model.Table;
+import org.dinky.metadata.convert.DmTypeConvert;
+import org.dinky.metadata.convert.ITypeConvert;
+import org.dinky.metadata.query.DmQuery;
+import org.dinky.metadata.query.IDBQuery;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+/**
+ * DmDriver
+ *
+ * @author wenmo
+ * @since 2021/7/20 14:06
+ **/
+@Slf4j
+public class DmDriver extends AbstractJdbcDriver {
+
+ @Override
+ public IDBQuery getDBQuery() {
+ return new DmQuery();
+ }
+
+ @Override
+ public ITypeConvert getTypeConvert() {
+ return new DmTypeConvert();
+ }
+
+ @Override
+ public String getType() {
+ return "dm";
+ }
+
+ @Override
+ public String getName() {
+ return "dm数据库";
+ }
+
+ @Override
+ public String getDriverClass() {
+ return "dm.jdbc.driver.DmDriver";
+ }
+
+ @Override
+ public Map getFlinkColumnTypeConversion() {
+ HashMap map = new HashMap<>();
+ map.put("VARCHAR", "STRING");
+ map.put("TEXT", "STRING");
+ map.put("INT", "INT");
+ map.put("DATETIME", "TIMESTAMP");
+ return map;
+ }
+
+ @Override
+ public String generateCreateTableSql(Table table) {
+ String genTableSql = genTable(table);
+ log.info("Auto generateCreateTableSql {}", genTableSql);
+ return genTableSql;
+ }
+
+ @Override
+ public String getCreateTableSql(Table table) {
+ return genTable(table);
+ }
+
+ public String genTable(Table table) {
+ StringBuilder key = new StringBuilder();
+ StringBuilder sb = new StringBuilder();
+
+ sb.append("CREATE TABLE IF NOT EXISTS ")
+ .append("\"")
+ .append(table.getSchema())
+ .append("\".\"")
+ .append(table.getName())
+ .append("\"")
+ .append(" (\n");
+ for (int i = 0; i < table.getColumns().size(); i++) {
+ Column column = table.getColumns().get(i);
+ sb.append(" \"").append(column.getName()).append("\" ");
+ if (Asserts.isNotNullString(column.getType())
+ && column.getType().toLowerCase().contains("unsigned")) {
+ sb.append(column.getType().replaceAll("(?i)unsigned", "(" + column.getLength() + ") unsigned"));
+ } else {
+ // handle floating-point precision and scale
+ sb.append(column.getType());
+ if (column.getPrecision() > 0 && column.getScale() > 0) {
+ sb.append("(")
+ .append(column.getLength())
+ .append(",").append(column.getScale())
+ .append(")");
+ } else if (null != column.getLength()) { // handle string and plain numeric types
+ sb.append("(").append(column.getLength()).append(")");
+ }
+ }
+ if (Asserts.isNotNull(column.getDefaultValue())) {
+ if ("".equals(column.getDefaultValue())) {
+ // use single quotes for the empty default value
+ sb.append(" DEFAULT ").append("''");
+ } else {
+ // if a default value exists and the type is not datetime/datetime(x)/timestamp/timestamp(x), it must be single-quoted
+ if (column.getType().toLowerCase().startsWith("datetime")
+ || column.getType().toLowerCase().startsWith("timestamp")) {
+ sb.append(" DEFAULT ").append(column.getDefaultValue());
+ } else {
+ sb.append(" DEFAULT ").append("'").append(column.getDefaultValue()).append("'");
+ }
+ }
+ } else {
+ if (!column.isNullable()) {
+ sb.append(" NOT ");
+ }
+ sb.append(" NULL ");
+ }
+ if (column.isAutoIncrement()) {
+ sb.append(" AUTO_INCREMENT ");
+ }
+ if (Asserts.isNotNullString(column.getComment())) {
+ sb.append(" COMMENT '").append(column.getComment()).append("'");
+ }
+ if (column.isKeyFlag()) {
+ key.append("\"").append(column.getName()).append("\",");
+ }
+ if (i < table.getColumns().size() - 1 || key.length() > 0) {
+ sb.append(",");
+ }
+ sb.append("\n");
+ }
+
+ if (key.length() > 0) {
+ sb.append(" PRIMARY KEY (");
+ sb.append(key.substring(0, key.length() - 1));
+ sb.append(")\n");
+ }
+
+ // sb.append(")\n ENGINE=").append(table.getEngine());
+ sb.append(")\n");
+ if (Asserts.isNotNullString(table.getOptions())) {
+ sb.append(" ").append(table.getOptions());
+ }
+
+ if (Asserts.isNotNullString(table.getComment())) {
+ sb.append(" COMMENT='").append(table.getComment()).append("'");
+ }
+ sb.append(";");
+ return sb.toString();
+ }
+
+ @Override
+ public StringBuilder genQueryOption(QueryData queryData) {
+
+ String where = queryData.getOption().getWhere();
+ String order = queryData.getOption().getOrder();
+ int limitStart = queryData.getOption().getLimitStart();
+ int limitEnd = queryData.getOption().getLimitEnd();
+
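+ // assemble: select * from "schema"."table" [where ...] [order by ...] limit start,end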
+ StringBuilder optionBuilder = new StringBuilder()
+ .append("select * from ")
+ .append("\"")
+ .append(queryData.getSchemaName())
+ .append("\"")
+ .append(".")
+ .append("\"")
+ .append(queryData.getTableName())
+ .append("\"");
+
+ if (where != null && !where.equals("")) {
+ optionBuilder.append(" where ").append(where);
+ }
+ if (order != null && !order.equals("")) {
+ optionBuilder.append(" order by ").append(order);
+ }
+
+ optionBuilder.append(" limit ")
+ .append(limitStart)
+ .append(",")
+ .append(limitEnd);
+
+ return optionBuilder;
+ }
+
+ @Override
+ public String getSqlSelect(Table table) {
+ List<Column> columns = table.getColumns();
+ StringBuilder sb = new StringBuilder("SELECT\n");
+ for (int i = 0; i < columns.size(); i++) {
+ sb.append(" ");
+ if (i > 0) {
+ sb.append(",");
+ }
+ String columnComment = columns.get(i).getComment();
+ if (Asserts.isNotNullString(columnComment)) {
+ if (columnComment.contains("'") || columnComment.contains("\"")) {
+ columnComment = columnComment.replaceAll("\"|'", "");
+ }
+ sb.append("\"").append(columns.get(i).getName()).append("\" -- ").append(columnComment).append(" \n");
+ } else {
+ sb.append("\"").append(columns.get(i).getName()).append("\" \n");
+ }
+ }
+ if (Asserts.isNotNullString(table.getComment())) {
+ sb.append(" FROM \"").append(table.getSchema()).append("\".\"").append(table.getName()).append("\";")
+ .append(" -- ").append(table.getComment()).append("\n");
+ } else {
+ sb.append(" FROM \"").append(table.getSchema()).append("\".\"").append(table.getName()).append("\";\n");
+ }
+ return sb.toString();
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/query/DmQuery.java b/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/query/DmQuery.java
new file mode 100644
index 0000000000..7d7046481f
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-dm/src/main/java/org/dinky/metadata/query/DmQuery.java
@@ -0,0 +1,113 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.query;
+
+/**
+ * DmQuery
+ *
+ * @author wenmo
+ * @since 2021/7/20 14:01
+ **/
+public class DmQuery extends AbstractDBQuery {
+
+ @Override
+ public String schemaAllSql() {
+ return "SELECT DISTINCT OWNER FROM ALL_TAB_COMMENTS";
+ }
+
+ @Override
+ public String tablesSql(String schemaName) {
+ return "SELECT * FROM ALL_TAB_COMMENTS WHERE OWNER='" + schemaName + "'";
+ }
+
+ @Override
+ public String tablesSql(String schemaName, String tableName) {
+ return "SELECT * FROM ALL_TAB_COMMENTS WHERE OWNER='" + schemaName + "' AND TABLE_NAME='" + tableName + "'";
+ }
+
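+ /**
+ * Column metadata from DM's ALL_TAB_COLUMNS / ALL_COL_COMMENTS: precision and scale are
+ * folded into DATA_TYPE for NUMBER columns, and a PRI flag is derived from the primary-key constraint.
+ */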
+ @Override
+ public String columnsSql(String schemaName, String tableName) {
+ return "SELECT A.DATA_LENGTH AS \"LENGTH\", A.COLUMN_NAME, CASE WHEN A.DATA_TYPE='NUMBER' THEN "
+ + "(CASE WHEN A.DATA_PRECISION IS NULL THEN A.DATA_TYPE "
+ + "WHEN NVL(A.DATA_SCALE, 0) > 0 THEN A.DATA_TYPE||'('||A.DATA_PRECISION||','||A.DATA_SCALE||')' "
+ + "ELSE A.DATA_TYPE||'('||A.DATA_PRECISION||')' END) "
+ + "ELSE A.DATA_TYPE END DATA_TYPE,A.DATA_PRECISION NUMERIC_PRECISION,A.DATA_SCALE NUMERIC_SCALE,"
+ + " B.COMMENTS,A.NULLABLE,DECODE((select count(1) from all_constraints pc,all_cons_columns pcc"
+ + " where pcc.column_name = A.column_name"
+ + " and pcc.constraint_name = pc.constraint_name"
+ + " and pc.constraint_type ='P'"
+ + " and pcc.owner = upper(A.OWNER)"
+ + " and pcc.table_name = upper(A.TABLE_NAME)),0,'','PRI') KEY "
+ + "FROM ALL_TAB_COLUMNS A "
+ + " INNER JOIN ALL_COL_COMMENTS B ON A.TABLE_NAME = B.TABLE_NAME AND A.COLUMN_NAME = B.COLUMN_NAME "
+// + "AND B.OWNER = '" + schemaName + "'"
+ + " AND B.SCHEMA_NAME='" + schemaName + "'" + " LEFT JOIN ALL_CONSTRAINTS D ON D.TABLE_NAME = A.TABLE_NAME AND D.CONSTRAINT_TYPE = 'P' AND D.OWNER = '" + schemaName + "'" + " LEFT JOIN ALL_CONS_COLUMNS C ON C.CONSTRAINT_NAME = D.CONSTRAINT_NAME AND C.COLUMN_NAME=A.COLUMN_NAME AND C.OWNER = '" + schemaName + "'" + "WHERE A.OWNER = '" + schemaName + "' AND A.TABLE_NAME = '" + tableName + "' ORDER BY A.COLUMN_ID ";
+ }
+
+ @Override
+ public String schemaName() {
+ return "OWNER";
+ }
+
+ @Override
+ public String tableName() {
+ return "TABLE_NAME";
+ }
+
+ @Override
+ public String tableComment() {
+ return "COMMENTS";
+ }
+
+ @Override
+ public String tableType() {
+ return "TABLE_TYPE";
+ }
+
+ @Override
+ public String columnName() {
+ return "COLUMN_NAME";
+ }
+
+ @Override
+ public String columnType() {
+ return "DATA_TYPE";
+ }
+
+ @Override
+ public String columnComment() {
+ return "COMMENTS";
+ }
+
+ @Override
+ public String columnKey() {
+ return "KEY";
+ }
+
+ @Override
+ public String isNullable() {
+ return "NULLABLE";
+ }
+
+ @Override
+ public String nullableValue() {
+ return "Y";
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-dm/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver b/dinky-metadata/dinky-metadata-dm/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver
new file mode 100644
index 0000000000..6eba05ab76
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-dm/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver
@@ -0,0 +1 @@
+org.dinky.metadata.driver.DmDriver
\ No newline at end of file
diff --git a/dinky-metadata/dinky-metadata-dm/src/test/java/org/dinky/metadata/DmTest.java b/dinky-metadata/dinky-metadata-dm/src/test/java/org/dinky/metadata/DmTest.java
new file mode 100644
index 0000000000..602e47bae1
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-dm/src/test/java/org/dinky/metadata/DmTest.java
@@ -0,0 +1,92 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata;
+
+import org.dinky.metadata.config.AbstractJdbcConfig;
+import org.dinky.metadata.config.DriverConfig;
+import org.dinky.metadata.driver.Driver;
+import org.dinky.metadata.result.JdbcSelectResult;
+import org.dinky.data.model.Column;
+import org.dinky.data.model.Schema;
+import org.dinky.data.model.Table;
+
+import java.util.List;
+import java.util.UUID;
+
+import org.junit.Test;
+
+/**
+ * DmTest
+ *
+ * @author wenmo
+ * @since 2021/7/20 15:32
+ **/
+public class DmTest {
+
+ private static final String IP = "192.168.210.77";
+
+ public Driver getDriver() {
+ DriverConfig<AbstractJdbcConfig> config = new DriverConfig<>();
+ config.setName(UUID.randomUUID().toString());
+ config.setType("dm");
+ config.setConnectConfig(AbstractJdbcConfig.builder()
+ .ip(IP)
+ .port(5236)
+ .username("SFTOA")
+ .password("123456789")
+ .url("jdbc:dm://" + IP
+ + ":5236/SFTOA?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&autoReconnect=true")
+ .build());
+ return Driver.build(config);
+ }
+
+ @Test
+ public void connectTest() {
+ String test = getDriver().test();
+ System.out.println(test);
+ System.out.println("end...");
+ }
+
+ @Test
+ public void schemaTest() {
+ Driver driver = getDriver();
+ List<Schema> schemasAndTables = driver.getSchemasAndTables();
+ Table table = driver.getTable("SFTOA", "EGOV_DISPATCH");
+ String sql = table.getFlinkTableSql("SFTOA", "");
+ String select = driver.getSqlSelect(table);
+ String ddl = driver.getCreateTableSql(table);
+ driver.close();
+ System.out.println("end...");
+ }
+
+ @Test
+ public void columnTest() {
+ Driver driver = getDriver();
+ List<Column> columns = driver.listColumns("dca", "MENU");
+ System.out.println("end...");
+ }
+
+ @Test
+ public void queryTest() {
+ Driver driver = getDriver();
+ JdbcSelectResult query = driver.query("select * from MENU", 10);
+ System.out.println("end...");
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-kingbase/pom.xml b/dinky-metadata/dinky-metadata-kingbase/pom.xml
new file mode 100644
index 0000000000..ef57a6673a
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-kingbase/pom.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata</artifactId>
+ <version>${revision}</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>dinky-metadata-kingbase</artifactId>
+ <packaging>jar</packaging>
+
+ <properties>
+ <maven.compiler.source>${target.java.version}</maven.compiler.source>
+ <maven.compiler.target>${target.java.version}</maven.compiler.target>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-base</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>kingbase.jdbc</groupId>
+ <artifactId>kingbaseJDBC8.6</artifactId>
+ <version>8.6</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/convert/KingbaseTypeConvert.java b/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/convert/KingbaseTypeConvert.java
new file mode 100644
index 0000000000..63da828b19
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/convert/KingbaseTypeConvert.java
@@ -0,0 +1,134 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.convert;
+
+import org.dinky.assertion.Asserts;
+import org.dinky.data.enums.ColumnType;
+import org.dinky.data.model.Column;
+import org.dinky.metadata.config.AbstractJdbcConfig;
+
+/**
+ * KingbaseTypeConvert
+ *
+ * @author wenmo
+ * @since 2021/7/20 15:21
+ **/
+public class KingbaseTypeConvert implements ITypeConvert<AbstractJdbcConfig> {
+
+ @Override
+ public ColumnType convert(Column column) {
+ ColumnType columnType = ColumnType.STRING;
+ if (Asserts.isNull(column)) {
+ return columnType;
+ }
+ String t = column.getType().toLowerCase();
+ boolean isNullable = !column.isKeyFlag() && column.isNullable();
+ if (t.contains("smallint") || t.contains("int2") || t.contains("smallserial") || t.contains("serial2")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_SHORT;
+ } else {
+ columnType = ColumnType.SHORT;
+ }
+ } else if (t.contains("integer") || t.contains("int4") || t.contains("serial")) {
+ if (isNullable) {
+ columnType = ColumnType.INTEGER;
+ } else {
+ columnType = ColumnType.INT;
+ }
+ } else if (t.contains("bigint") || t.contains("int8") || t.contains("bigserial")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_LONG;
+ } else {
+ columnType = ColumnType.LONG;
+ }
+ } else if (t.contains("real") || t.contains("float4")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_FLOAT;
+ } else {
+ columnType = ColumnType.FLOAT;
+ }
+ } else if (t.contains("float8") || t.contains("double precision")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_DOUBLE;
+ } else {
+ columnType = ColumnType.DOUBLE;
+ }
+ } else if (t.contains("numeric") || t.contains("decimal")) {
+ columnType = ColumnType.DECIMAL;
+ } else if (t.contains("bool")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_BOOLEAN;
+ } else {
+ columnType = ColumnType.BOOLEAN;
+ }
+ } else if (t.contains("timestamp")) {
+ columnType = ColumnType.TIMESTAMP;
+ } else if (t.contains("date")) {
+ columnType = ColumnType.DATE;
+ } else if (t.contains("time")) {
+ columnType = ColumnType.TIME;
+ } else if (t.contains("char") || t.contains("text")) {
+ columnType = ColumnType.STRING;
+ } else if (t.contains("bytea")) {
+ columnType = ColumnType.BYTES;
+ } else if (t.contains("array")) {
+ columnType = ColumnType.T;
+ }
+ return columnType;
+ }
+
+ @Override
+ public String convertToDB(ColumnType columnType) {
+ switch (columnType) {
+ case SHORT:
+ case JAVA_LANG_SHORT:
+ return "int2";
+ case INTEGER:
+ case INT:
+ return "integer";
+ case LONG:
+ case JAVA_LANG_LONG:
+ return "bigint";
+ case FLOAT:
+ case JAVA_LANG_FLOAT:
+ return "float4";
+ case DOUBLE:
+ case JAVA_LANG_DOUBLE:
+ return "float8";
+ case DECIMAL:
+ return "decimal";
+ case BOOLEAN:
+ case JAVA_LANG_BOOLEAN:
+ return "boolean";
+ case TIMESTAMP:
+ return "timestamp";
+ case DATE:
+ return "date";
+ case TIME:
+ return "time";
+ case BYTES:
+ return "bytea";
+ case T:
+ return "array";
+ default:
+ return "varchar";
+ }
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/driver/KingbaseDriver.java b/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/driver/KingbaseDriver.java
new file mode 100644
index 0000000000..638685115a
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/driver/KingbaseDriver.java
@@ -0,0 +1,208 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.driver;
+
+import org.dinky.assertion.Asserts;
+import org.dinky.metadata.convert.ITypeConvert;
+import org.dinky.metadata.convert.KingbaseTypeConvert;
+import org.dinky.metadata.query.IDBQuery;
+import org.dinky.metadata.query.KingbaseQuery;
+import org.dinky.data.model.Column;
+import org.dinky.data.model.QueryData;
+import org.dinky.data.model.Table;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * MysqlDriver
+ *
+ * @author wenmo
+ * @since 2021/7/20 14:06
+ **/
+public class KingbaseDriver extends AbstractJdbcDriver {
+
+ @Override
+ public IDBQuery getDBQuery() {
+ return new KingbaseQuery();
+ }
+
+ @Override
+ public ITypeConvert getTypeConvert() {
+ return new KingbaseTypeConvert();
+ }
+
+ @Override
+ public String getType() {
+ return "kingbase";
+ }
+
+ @Override
+ public String getName() {
+ return "kingbase数据库";
+ }
+
+ @Override
+ public String getDriverClass() {
+ return "com.kingbase8.Driver";
+ }
+
+ @Override
+ public Map<String, String> getFlinkColumnTypeConversion() {
+ HashMap<String, String> map = new HashMap<>();
+ map.put("VARCHAR", "STRING");
+ map.put("TEXT", "STRING");
+ map.put("INT", "INT");
+ map.put("DATETIME", "TIMESTAMP");
+ return map;
+ }
+
+ @Override
+ public String generateCreateSchemaSql(String schemaName) {
+ StringBuilder sb = new StringBuilder();
+ sb.append("CREATE SCHEMA ").append(schemaName);
+ return sb.toString();
+ }
+
+ @Override
+ public String getSqlSelect(Table table) {
+ List<Column> columns = table.getColumns();
+ StringBuilder sb = new StringBuilder("SELECT\n");
+ for (int i = 0; i < columns.size(); i++) {
+ sb.append(" ");
+ if (i > 0) {
+ sb.append(",");
+ }
+ String columnComment = columns.get(i).getComment();
+ if (Asserts.isNotNullString(columnComment)) {
+ if (columnComment.contains("'") || columnComment.contains("\"")) {
+ columnComment = columnComment.replaceAll("\"|'", "");
+ }
+ sb.append("\"" + columns.get(i).getName() + "\" -- " + columnComment + " \n");
+ } else {
+ sb.append("\"" + columns.get(i).getName() + "\" \n");
+ }
+ }
+ if (Asserts.isNotNullString(table.getComment())) {
+ sb.append(" FROM \"" + table.getSchema() + "\".\"" + table.getName() + "\";" + " -- " + table.getComment()
+ + "\n");
+ } else {
+ sb.append(" FROM \"" + table.getSchema() + "\".\"" + table.getName() + "\";\n");
+ }
+ return sb.toString();
+ }
+
+ @Override
+ public String getCreateTableSql(Table table) {
+ StringBuilder key = new StringBuilder();
+ StringBuilder sb = new StringBuilder();
+ StringBuilder comments = new StringBuilder();
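+ // column and table comments are emitted as separate COMMENT ON statements appended after the CREATE TABLE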
+
+ sb.append("CREATE TABLE \"").append(table.getSchema()).append("\".\"").append(table.getName())
+ .append("\" (\n");
+
+ for (int i = 0; i < table.getColumns().size(); i++) {
+ Column column = table.getColumns().get(i);
+ if (i > 0) {
+ sb.append(",\n");
+ }
+ sb.append(" \"").append(column.getName()).append("\" ");
+ sb.append(column.getType());
+ if (column.getPrecision() > 0 && column.getScale() > 0) {
+ sb.append("(")
+ .append(column.getPrecision())
+ .append(",").append(column.getScale())
+ .append(")");
+ } else if (null != column.getLength()) {
+ sb.append("(").append(column.getLength()).append(")");
+ }
+ if (!column.isNullable()) {
+ sb.append(" NOT NULL");
+ }
+ if (Asserts.isNotNullString(column.getDefaultValue()) && !column.getDefaultValue().contains("nextval")) {
+ sb.append(" DEFAULT ").append(column.getDefaultValue());
+ }
+
+ if (column.isKeyFlag()) {
+ if (key.length() > 0) {
+ key.append(",");
+ }
+ key.append(column.getName());
+ }
+
+ if (Asserts.isNotNullString(column.getComment())) {
+ comments.append("COMMENT ON COLUMN \"").append(table.getSchema()).append("\".\"")
+ .append(table.getName()).append("\".\"")
+ .append(column.getName()).append("\" IS '").append(column.getComment()).append("';\n");
+ }
+ }
+
+ if (Asserts.isNotNullString(table.getComment())) {
+ comments.append("COMMENT ON TABLE \"").append(table.getSchema()).append("\".\"")
+ .append(table.getName()).append("\" IS '").append(table.getComment()).append("';");
+ }
+ if (key.length() > 0) {
+ sb.append(",\n");
+ sb.append("CONSTRAINT ");
+ sb.append(table.getName());
+ sb.append("_pkey PRIMARY KEY (");
+ sb.append(key);
+ sb.append(")");
+ }
+ sb.append("\n);\n\n").append(comments);
+
+ return sb.toString();
+ }
+
+ @Override
+ public StringBuilder genQueryOption(QueryData queryData) {
+
+ String where = queryData.getOption().getWhere();
+ String order = queryData.getOption().getOrder();
+ int limitStart = queryData.getOption().getLimitStart();
+ int limitEnd = queryData.getOption().getLimitEnd();
+
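+ // Kingbase is PostgreSQL-compatible: only a row-count limit is appended below; limitStart (the offset) is not used here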
+ StringBuilder optionBuilder = new StringBuilder()
+ .append("select * from ")
+ .append(queryData.getSchemaName())
+ .append(".")
+ .append(queryData.getTableName());
+
+ if (where != null && !where.equals("")) {
+ optionBuilder.append(" where ").append(where);
+ }
+ if (order != null && !order.equals("")) {
+ optionBuilder.append(" order by ").append(order);
+ }
+
+ optionBuilder.append(" limit ")
+ .append(limitEnd);
+
+ return optionBuilder;
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/query/KingbaseQuery.java b/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/query/KingbaseQuery.java
new file mode 100644
index 0000000000..9e43022f97
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-kingbase/src/main/java/org/dinky/metadata/query/KingbaseQuery.java
@@ -0,0 +1,145 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.query;
+
+public class KingbaseQuery extends AbstractDBQuery {
+ @Override
+ public String schemaAllSql() {
+ return "SELECT nspname AS \"schema_name\" FROM pg_namespace WHERE nspname NOT LIKE 'pg_%' AND nspname != 'information_schema' ORDER BY nspname";
+ }
+
+ @Override
+ public String tablesSql(String schemaName) {
+ return "SELECT n.nspname AS schema_name\n"
+ + " , c.relname AS tablename\n"
+ + " , obj_description(c.oid) AS comments\n"
+ + " , c.reltuples as rows\n"
+ + "FROM pg_class c\n"
+ + " LEFT JOIN pg_namespace n ON n.oid = c.relnamespace\n"
+ + "WHERE ((c.relkind = 'r'::\"char\") OR (c.relkind = 'f'::\"char\") OR (c.relkind = 'p'::\"char\"))\n"
+ + " AND n.nspname = '" + schemaName + "'\n"
+ + "ORDER BY n.nspname, tablename";
+ }
+
+ @Override
+ public String tablesSql(String schemaName, String tableName) {
+ return "SELECT n.nspname AS schema_name\n"
+ + " , c.relname AS tablename\n"
+ + " , obj_description(c.oid) AS comments\n"
+ + " , c.reltuples as rows\n"
+ + "FROM pg_class c\n"
+ + " LEFT JOIN pg_namespace n ON n.oid = c.relnamespace\n"
+ + "WHERE ((c.relkind = 'r'::\"char\") OR (c.relkind = 'f'::\"char\") OR (c.relkind = 'p'::\"char\"))\n"
+ + " AND n.nspname = '" + schemaName + "' AND c.relname='" + tableName + "'\n"
+ + "ORDER BY n.nspname, tablename";
+ }
+
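+ /**
+ * Column metadata from information_schema.columns joined with the pg_* catalogs,
+ * deriving the PRI key flag and column comments.
+ */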
+ @Override
+ public String columnsSql(String schemaName, String tableName) {
+ return "SELECT col.column_name as name\n"
+ + " , col.character_maximum_length as length\n"
+ + " , col.is_nullable as is_nullable\n"
+ + " , col.numeric_precision as numeric_precision\n"
+ + " , col.numeric_scale as numeric_scale\n"
+ + " , col.ordinal_position as ordinal_position\n"
+ + " , col.udt_name as type\n"
+ + " , (CASE WHEN (SELECT COUNT(*) FROM pg_constraint AS PC WHERE b.attnum = PC.conkey[1] AND PC.contype = 'p' and PC.conrelid = c.oid) > 0 \n"
+ + "THEN 'PRI' ELSE '' END) AS key\n"
+ + " , col_description(c.oid, col.ordinal_position) AS comment\n"
+ + " , col.column_default AS column_default\n"
+ + "FROM information_schema.columns AS col\n"
+ + " LEFT JOIN pg_namespace ns ON ns.nspname = col.table_schema\n"
+ + " LEFT JOIN pg_class c ON col.table_name = c.relname AND c.relnamespace = ns.oid\n"
+ + " LEFT JOIN pg_attribute b ON b.attrelid = c.oid AND b.attname = col.column_name\n"
+ + "WHERE col.table_schema = '" + schemaName + "'\n"
+ + " AND col.table_name = '" + tableName + "'\n"
+ + "ORDER BY col.table_schema, col.table_name, col.ordinal_position";
+ }
+
+ @Override
+ public String schemaName() {
+ return "SCHEMA_NAME";
+ }
+
+ @Override
+ public String tableName() {
+ return "TABLENAME";
+ }
+
+ @Override
+ public String tableComment() {
+ return "COMMENTS";
+ }
+
+ @Override
+ public String rows() {
+ return "ROWS";
+ }
+
+ @Override
+ public String columnName() {
+ return "NAME";
+ }
+
+ @Override
+ public String columnType() {
+ return "TYPE";
+ }
+
+ @Override
+ public String columnLength() {
+ return "LENGTH";
+ }
+
+ @Override
+ public String columnComment() {
+ return "COMMENT";
+ }
+
+ @Override
+ public String columnKey() {
+ return "KEY";
+ }
+
+ @Override
+ public String precision() {
+ return "NUMERIC_PRECISION";
+ }
+
+ @Override
+ public String scale() {
+ return "NUMERIC_SCALE";
+ }
+
+ @Override
+ public String columnPosition() {
+ return "ORDINAL_POSITION";
+ }
+
+ @Override
+ public String defaultValue() {
+ return "COLUMN_DEFAULT";
+ }
+
+ @Override
+ public String isNullable() {
+ return "IS_NULLABLE";
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-kingbase/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver b/dinky-metadata/dinky-metadata-kingbase/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver
new file mode 100644
index 0000000000..9307fb8f87
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-kingbase/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver
@@ -0,0 +1 @@
+org.dinky.metadata.driver.KingbaseDriver
\ No newline at end of file
diff --git a/dinky-metadata/dinky-metadata-kingbase/src/test/java/org/dinky/metadata/KingbaseTest.java b/dinky-metadata/dinky-metadata-kingbase/src/test/java/org/dinky/metadata/KingbaseTest.java
new file mode 100644
index 0000000000..1bc3012e95
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-kingbase/src/test/java/org/dinky/metadata/KingbaseTest.java
@@ -0,0 +1,104 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata;
+
+import org.dinky.metadata.config.AbstractJdbcConfig;
+import org.dinky.metadata.config.DriverConfig;
+import org.dinky.metadata.driver.Driver;
+import org.dinky.metadata.result.JdbcSelectResult;
+import org.dinky.data.model.Column;
+import org.dinky.data.model.Schema;
+import org.dinky.data.model.Table;
+
+import java.util.List;
+import java.util.Optional;
+import java.util.ServiceLoader;
+import java.util.UUID;
+
+import org.junit.Test;
+
+/**
+ * KingbaseTest
+ *
+ * @author wenmo
+ * @since 2021/7/20 15:32
+ **/
+public class KingbaseTest {
+
+ private static final String IP = "192.168.210.86";
+
+ public Driver getDriver() {
+ DriverConfig<AbstractJdbcConfig> config = new DriverConfig<>();
+ config.setName(UUID.randomUUID().toString());
+ config.setType("kingbase");
+ config.setConnectConfig(AbstractJdbcConfig.builder()
+ .ip(IP)
+ .port(54321)
+ .username("SYSTEM")
+ .password("12345678")
+ .url("jdbc:kingbase8://"+IP+":54321/EGOV?currentSchema=std23,PUBLIC,SYS_CATALOG&autoReconnect=false&useUnicode=true&characterEncoding=UTF-8&characterSetResults=UTF-8&zeroDateTimeBehavior=convertToNull&useSSL=false")
+ .build());
+ return Driver.build(config);
+ }
+
+ @Test
+ public void connectTest() {
+ String test = getDriver().test();
+ System.out.println(test);
+ System.out.println("end...");
+ }
+
+ @Test
+ public void schemaTest() {
+ Driver driver = getDriver();
+ List<Schema> schemasAndTables = driver.getSchemasAndTables();
+ Table table = driver.getTable("std23", "egov_dispatch");
+ String sql = table.getFlinkTableSql("std23", "");
+ String select = driver.getSqlSelect(table);
+ String ddl = driver.getCreateTableSql(table);
+ driver.close();
+ System.out.println("end...");
+ }
+
+ @Test
+ public void columnTest() {
+ Driver driver = getDriver();
+ List<Column> columns = driver.listColumns("dca", "MENU");
+ System.out.println("end...");
+ }
+
+ @Test
+ public void queryTest() {
+ Driver driver = getDriver();
+ JdbcSelectResult query = driver.query("select * from MENU", 10);
+ System.out.println("end...");
+ }
+
+ @Test
+ public void testLoaderDriver() {
+ ServiceLoader<Driver> drivers = ServiceLoader.load(Driver.class);
+ for (Driver driver : drivers) {
+ System.out.println("--> " + driver.getType() + ": " + driver.getName());
+ if (driver.canHandle("kingbase")) {
+ System.out.println("===> found kingbase.");
+ }
+ }
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-trino/pom.xml b/dinky-metadata/dinky-metadata-trino/pom.xml
new file mode 100644
index 0000000000..fff2517981
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-trino/pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata</artifactId>
+ <version>${revision}</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>dinky-metadata-trino</artifactId>
+ <packaging>jar</packaging>
+
+ <properties>
+ <maven.compiler.source>${target.java.version}</maven.compiler.source>
+ <maven.compiler.target>${target.java.version}</maven.compiler.target>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.dinky</groupId>
+ <artifactId>dinky-metadata-base</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.alibaba</groupId>
+ <artifactId>druid-spring-boot-starter</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-nop</artifactId>
+ <version>1.6.1</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <version>3.4</version>
+ </dependency>
+ <dependency>
+ <groupId>io.trino</groupId>
+ <artifactId>trino-jdbc</artifactId>
+ <version>433</version>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/constant/TrinoConstant.java b/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/constant/TrinoConstant.java
new file mode 100644
index 0000000000..76beff5ed6
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/constant/TrinoConstant.java
@@ -0,0 +1,60 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.constant;
+
+public interface TrinoConstant {
+
+ /**
+ * Query all catalogs
+ */
+ String QUERY_ALL_DATABASE = "show catalogs";
+ /**
+ * Query all tables under the given schema
+ */
+ String QUERY_ALL_TABLES_BY_SCHEMA = "show tables from %s";
+ /**
+ * Query the given schema.table's column metadata: name, type, comment
+ */
+ String QUERY_TABLE_SCHEMA = " describe %s.%s";
+ /**
+ * Query all schemas under the given catalog
+ */
+ String QUERY_TABLE_COLUMNS_ONLY = "show schemas from %s";
+ /**
+ * Result-set column name holding the schema
+ */
+ String SCHEMA = "SCHEMA";
+ /**
+ * Catalog to exclude (Trino's built-in system catalog)
+ */
+ String EXTRA_SCHEMA = "system";
+ /**
+ * Schema to exclude
+ */
+ String EXTRA_DB = "information_schema";
+
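+ // getQuotesNameList quotes each dot-separated part of a qualified name, e.g. "hive.ads" -> "hive"."ads"; getQuotesName quotes a single identifier.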
+ static String getQuotesNameList(String name) {
+ return name == null ? null : getQuotesName(name.replaceAll("(\\w)(\\.)(\\w)", "$1\"$2\"$3"));
+ }
+
+ static String getQuotesName(String name) {
+ return name == null ? null : ("\"" + name.trim() + "\"").replaceAll("\"{2}", "\"");
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/convert/TrinoTypeConvert.java b/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/convert/TrinoTypeConvert.java
new file mode 100644
index 0000000000..59560988e2
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/convert/TrinoTypeConvert.java
@@ -0,0 +1,135 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.convert;
+
+import org.dinky.assertion.Asserts;
+import org.dinky.data.enums.ColumnType;
+import org.dinky.data.model.Column;
+import org.dinky.metadata.config.AbstractJdbcConfig;
+
+public class TrinoTypeConvert implements ITypeConvert<AbstractJdbcConfig> {
+
+ @Override
+ public ColumnType convert(Column column) {
+ ColumnType columnType = ColumnType.STRING;
+ if (Asserts.isNull(column)) {
+ return columnType;
+ }
+ String t = column.getType().toLowerCase().trim();
+ boolean isNullable = !column.isKeyFlag() && column.isNullable();
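+ // matching is substring-based, so the more specific names (tinyint, smallint, bigint, largeint) must be tested before the generic "int"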
+ if (t.contains("char")) {
+ columnType = ColumnType.STRING;
+ } else if (t.contains("boolean")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_BOOLEAN;
+ } else {
+ columnType = ColumnType.BOOLEAN;
+ }
+ } else if (t.contains("tinyint")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_BYTE;
+ } else {
+ columnType = ColumnType.BYTE;
+ }
+ } else if (t.contains("smallint")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_SHORT;
+ } else {
+ columnType = ColumnType.SHORT;
+ }
+ } else if (t.contains("bigint")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_LONG;
+ } else {
+ columnType = ColumnType.LONG;
+ }
+ } else if (t.contains("largeint")) {
+ columnType = ColumnType.STRING;
+ } else if (t.contains("int")) {
+ if (isNullable) {
+ columnType = ColumnType.INTEGER;
+ } else {
+ columnType = ColumnType.INT;
+ }
+ } else if (t.contains("float")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_FLOAT;
+ } else {
+ columnType = ColumnType.FLOAT;
+ }
+ } else if (t.contains("double")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_DOUBLE;
+ } else {
+ columnType = ColumnType.DOUBLE;
+ }
+ } else if (t.contains("timestamp")) {
+ columnType = ColumnType.TIMESTAMP;
+ } else if (t.contains("date")) {
+ columnType = ColumnType.STRING;
+ } else if (t.contains("datetime")) {
+ columnType = ColumnType.STRING;
+ } else if (t.contains("decimal")) {
+ columnType = ColumnType.DECIMAL;
+ } else if (t.contains("time")) {
+ if (isNullable) {
+ columnType = ColumnType.JAVA_LANG_DOUBLE;
+ } else {
+ columnType = ColumnType.DOUBLE;
+ }
+ }
+ return columnType;
+ }
+
+ @Override
+ public String convertToDB(ColumnType columnType) {
+ switch (columnType) {
+ case STRING:
+ return "varchar";
+ case BOOLEAN:
+ case JAVA_LANG_BOOLEAN:
+ return "boolean";
+ case BYTE:
+ case JAVA_LANG_BYTE:
+ return "tinyint";
+ case SHORT:
+ case JAVA_LANG_SHORT:
+ return "smallint";
+ case LONG:
+ case JAVA_LANG_LONG:
+ return "bigint";
+ case FLOAT:
+ case JAVA_LANG_FLOAT:
+ return "float";
+ case DOUBLE:
+ case JAVA_LANG_DOUBLE:
+ return "double";
+ case DECIMAL:
+ return "decimal";
+ case INT:
+ case INTEGER:
+ return "int";
+ case TIMESTAMP:
+ return "timestamp";
+ default:
+ return "varchar";
+ }
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/driver/TrinoDriver.java b/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/driver/TrinoDriver.java
new file mode 100644
index 0000000000..08f81339bd
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/driver/TrinoDriver.java
@@ -0,0 +1,351 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.driver;
+
+import org.dinky.assertion.Asserts;
+import org.dinky.metadata.constant.TrinoConstant;
+import org.dinky.metadata.convert.ITypeConvert;
+import org.dinky.metadata.convert.TrinoTypeConvert;
+import org.dinky.metadata.query.IDBQuery;
+import org.dinky.metadata.query.TrinoQuery;
+import org.dinky.metadata.result.JdbcSelectResult;
+import org.dinky.data.model.Column;
+import org.dinky.data.model.QueryData;
+import org.dinky.data.model.Schema;
+import org.dinky.data.model.Table;
+import org.dinky.utils.LogUtil;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+public class TrinoDriver extends AbstractJdbcDriver implements Driver {
+
+ @Override
+ public Table getTable(String schemaName, String tableName) {
+ List<Table> tables = listTables(schemaName);
+ Table table = null;
+ for (Table item : tables) {
+ if (Asserts.isEquals(item.getName(), tableName)) {
+ table = item;
+ break;
+ }
+ }
+ if (Asserts.isNotNull(table)) {
+ table.setColumns(listColumns(schemaName, table.getName()));
+ }
+ return table;
+ }
+
+ @Override
+ public List<Table> listTables(String schemaName) {
+ List<Table> tableList = new ArrayList<>();
+ PreparedStatement preparedStatement = null;
+ ResultSet results = null;
+ IDBQuery dbQuery = getDBQuery();
+ String sql = dbQuery.tablesSql(schemaName);
+ try {
+ preparedStatement = conn.get().prepareStatement(sql);
+ results = preparedStatement.executeQuery();
+ ResultSetMetaData metaData = results.getMetaData();
+ List<String> columnList = new ArrayList<>();
+ for (int i = 1; i <= metaData.getColumnCount(); i++) {
+ columnList.add(metaData.getColumnLabel(i));
+ }
+ while (results.next()) {
+ String tableName = results.getString(dbQuery.tableName());
+ if (Asserts.isNotNullString(tableName)) {
+ Table tableInfo = new Table();
+ tableInfo.setName(tableName);
+ if (columnList.contains(dbQuery.tableComment())) {
+ tableInfo.setComment(results.getString(dbQuery.tableComment()));
+ }
+ tableInfo.setSchema(schemaName);
+ if (columnList.contains(dbQuery.tableType())) {
+ tableInfo.setType(results.getString(dbQuery.tableType()));
+ }
+ if (columnList.contains(dbQuery.catalogName())) {
+ tableInfo.setCatalog(results.getString(dbQuery.catalogName()));
+ }
+ if (columnList.contains(dbQuery.engine())) {
+ tableInfo.setEngine(results.getString(dbQuery.engine()));
+ }
+ tableList.add(tableInfo);
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ close(preparedStatement, results);
+ }
+ return tableList;
+ }
+
+ @Override
+ public List<Schema> getSchemasAndTables() {
+ return listSchemas();
+ }
+
+ @Override
+ public List<Schema> listSchemas() {
+ List<Schema> schemas = new ArrayList<>();
+ PreparedStatement preparedStatement = null;
+ ResultSet results = null;
+ PreparedStatement ps = null;
+ ResultSet rs = null;
+
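+ // Trino names are two-level (catalog.schema): enumerate catalogs first, then the schemas in each, and flatten to "catalog.schema"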
+ String schemasSql = getDBQuery().schemaAllSql();
+ try {
+ preparedStatement = conn.get().prepareStatement(schemasSql);
+ results = preparedStatement.executeQuery();
+ while (results.next()) {
+ String schemaName = results.getString(getDBQuery().schemaName());
+ // skip Trino's built-in "system" catalog
+ if (Asserts.isNotNullString(schemaName) && !TrinoConstant.EXTRA_SCHEMA.equals(schemaName)) {
+ ps = conn.get()
+ .prepareStatement(String.format(TrinoConstant.QUERY_TABLE_COLUMNS_ONLY, TrinoConstant.getQuotesNameList(schemaName)));
+ rs = ps.executeQuery();
+ while (rs.next()) {
+ String db = rs.getString(TrinoConstant.SCHEMA);
+ // skip the built-in "information_schema" schema
+ if (Asserts.isNotNullString(db) && !TrinoConstant.EXTRA_DB.equals(db)) {
+ Schema schema = new Schema(schemaName + "." + db);
+ schema.setTables(listTables(schema.getName()));
+ schemas.add(schema);
+ }
+ }
+ close(ps, rs);
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ close(ps, rs);
+ close(preparedStatement, results);
+ }
+ return schemas;
+ }
+
+ @Override
+ public List<Column> listColumns(String schemaName, String tableName) {
+ List<Column> columns = new ArrayList<>();
+ PreparedStatement preparedStatement = null;
+ ResultSet results = null;
+ IDBQuery dbQuery = getDBQuery();
+ String tableFieldsSql = dbQuery.columnsSql(schemaName, tableName);
+ try {
+ preparedStatement = conn.get().prepareStatement(tableFieldsSql);
+ results = preparedStatement.executeQuery();
+ ResultSetMetaData metaData = results.getMetaData();
+ List<String> columnList = new ArrayList<>();
+ for (int i = 1; i <= metaData.getColumnCount(); i++) {
+ columnList.add(metaData.getColumnLabel(i));
+ }
+ Integer positionId = 1;
+ while (results.next()) {
+ Column field = new Column();
+ if (StringUtils.isEmpty(results.getString(dbQuery.columnName()))) {
+ break;
+ } else {
+ if (columnList.contains(dbQuery.columnName())) {
+ String columnName = results.getString(dbQuery.columnName());
+ field.setName(columnName);
+ }
+ if (columnList.contains(dbQuery.columnType())) {
+ field.setType(results.getString(dbQuery.columnType()));
+ }
+ if (columnList.contains(dbQuery.columnComment())
+ && Asserts.isNotNull(results.getString(dbQuery.columnComment()))) {
+ String columnComment = results.getString(dbQuery.columnComment()).replaceAll("\"|'", "");
+ field.setComment(columnComment);
+ }
+ field.setPosition(positionId++);
+ field.setJavaType(getTypeConvert().convert(field));
+ }
+ columns.add(field);
+ }
+ } catch (SQLException e) {
+ e.printStackTrace();
+ } finally {
+ close(preparedStatement, results);
+ }
+ return columns;
+ }
+
+ @Override
+ public String getCreateTableSql(Table table) {
+ StringBuilder createTable = new StringBuilder();
+ PreparedStatement preparedStatement = null;
+ ResultSet results = null;
+ String createTableSql = getDBQuery().createTableSql(table.getSchema(), table.getName());
+ try {
+ preparedStatement = conn.get().prepareStatement(createTableSql);
+ results = preparedStatement.executeQuery();
+ ResultSetMetaData metaData = results.getMetaData();
+ while (results.next()) {
+ createTable.append(results.getString(getDBQuery().createTableName())).append("\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ close(preparedStatement, results);
+ }
+ return createTable.toString();
+ }
+
+ @Override
+ public int executeUpdate(String sql) throws Exception {
+ Asserts.checkNullString(sql, "Sql 语句为空");
+ String querySQL = sql.trim().replaceAll(";$", "");
+ int res = 0;
+ try (Statement statement = conn.get().createStatement()) {
+ res = statement.executeUpdate(querySQL);
+ }
+ return res;
+ }
+
+ @Override
+ public JdbcSelectResult query(String sql, Integer limit) {
+ if (Asserts.isNull(limit)) {
+ limit = 100;
+ }
+ JdbcSelectResult result = new JdbcSelectResult();
+ List<LinkedHashMap<String, Object>> datas = new ArrayList<>();
+ List<Column> columns = new ArrayList<>();
+ List<String> columnNameList = new ArrayList<>();
+ PreparedStatement preparedStatement = null;
+ ResultSet results = null;
+ int count = 0;
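+ // rows are capped client-side at "limit"; the SQL text itself is executed unmodified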
+ try {
+ String querySQL = sql.trim().replaceAll(";$", "");
+ preparedStatement = conn.get().prepareStatement(querySQL);
+ results = preparedStatement.executeQuery();
+ if (Asserts.isNull(results)) {
+ result.setSuccess(true);
+ close(preparedStatement, results);
+ return result;
+ }
+ ResultSetMetaData metaData = results.getMetaData();
+ for (int i = 1; i <= metaData.getColumnCount(); i++) {
+ columnNameList.add(metaData.getColumnLabel(i));
+ Column column = new Column();
+ column.setName(metaData.getColumnLabel(i));
+ column.setType(metaData.getColumnTypeName(i));
+ column.setAutoIncrement(metaData.isAutoIncrement(i));
+ column.setNullable(metaData.isNullable(i) != 0);
+ column.setJavaType(getTypeConvert().convert(column));
+ columns.add(column);
+ }
+ result.setColumns(columnNameList);
+ while (results.next()) {
+ LinkedHashMap<String, Object> data = new LinkedHashMap<>();
+ for (int i = 0; i < columns.size(); i++) {
+ data.put(columns.get(i).getName(),
+ getTypeConvert().convertValue(results, columns.get(i).getName(), columns.get(i).getType()));
+ }
+ datas.add(data);
+ count++;
+ if (count >= limit) {
+ break;
+ }
+ }
+ result.setSuccess(true);
+ } catch (Exception e) {
+ result.setError(LogUtil.getError(e));
+ result.setSuccess(false);
+ } finally {
+ close(preparedStatement, results);
+ }
+ result.setRowData(datas);
+ return result;
+ }
+
+ /**
+ * SQL assembly; paging is not implemented
+ */
+ @Override
+ public StringBuilder genQueryOption(QueryData queryData) {
+
+ String where = queryData.getOption().getWhere();
+ String order = queryData.getOption().getOrder();
+
+ StringBuilder optionBuilder = new StringBuilder()
+ .append("select * from ")
+ .append(queryData.getSchemaName())
+ .append(".")
+ .append(queryData.getTableName());
+
+ if (where != null && !where.equals("")) {
+ optionBuilder.append(" where ").append(where);
+ }
+ if (order != null && !order.equals("")) {
+ optionBuilder.append(" order by ").append(order);
+ }
+
+ return optionBuilder;
+ }
+
+ @Override
+ public IDBQuery getDBQuery() {
+ return new TrinoQuery();
+ }
+
+ @Override
+ public ITypeConvert getTypeConvert() {
+ return new TrinoTypeConvert();
+ }
+
+ @Override
+ public String getDriverClass() {
+ return "io.trino.jdbc.TrinoDriver";
+ }
+
+ @Override
+ public String getType() {
+ return "Trino";
+ }
+
+ @Override
+ public String getName() {
+ return "Trino";
+ }
+
+ @Override
+ public Map<String, String> getFlinkColumnTypeConversion() {
+ HashMap<String, String> map = new HashMap<>();
+ map.put("BOOLEAN", "BOOLEAN");
+ map.put("TINYINT", "TINYINT");
+ map.put("SMALLINT", "SMALLINT");
+ map.put("INT", "INT");
+ map.put("VARCHAR", "STRING");
+ map.put("TEXT", "STRING");
+ map.put("DATETIME", "TIMESTAMP");
+ return map;
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/query/TrinoQuery.java b/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/query/TrinoQuery.java
new file mode 100644
index 0000000000..7c215d18f1
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-trino/src/main/java/org/dinky/metadata/query/TrinoQuery.java
@@ -0,0 +1,91 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.metadata.query;
+
+import org.dinky.metadata.constant.TrinoConstant;
+import org.apache.commons.lang3.StringUtils;
+
+public class TrinoQuery extends AbstractDBQuery {
+
+ @Override
+ public String schemaAllSql() {
+ return TrinoConstant.QUERY_ALL_DATABASE;
+ }
+
+ @Override
+ public String tablesSql(String schemaName) {
+ return StringUtils.isBlank(schemaName) ? "show tables;"
+ : String.format(TrinoConstant.QUERY_ALL_TABLES_BY_SCHEMA, TrinoConstant.getQuotesNameList(schemaName));
+ }
+
+ @Override
+ public String tablesSql(String schemaName, String tableName) {
+ StringBuilder sb = new StringBuilder("show tables");
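+ // append the catalog/schema qualifier and the LIKE filter only when the corresponding name is supplied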
+ if (StringUtils.isNotBlank(schemaName)) {
+ sb.append(" from ").append(TrinoConstant.getQuotesNameList(schemaName));
+ }
+ if (StringUtils.isNotBlank(tableName)) {
+ sb.append(" like '").append(tableName).append("'");
+ }
+ return sb.toString();
+ }
+
+ @Override
+ public String columnsSql(String schemaName, String tableName) {
+ return String.format(TrinoConstant.QUERY_TABLE_SCHEMA, TrinoConstant.getQuotesNameList(schemaName), TrinoConstant.getQuotesName(tableName));
+ }
+
+ @Override
+ public String schemaName() {
+ return "Catalog";
+ }
+
+ @Override
+ public String createTableName() {
+ return "Create Table";
+ }
+
+ @Override
+ public String tableName() {
+ return "Table";
+ }
+
+ @Override
+ public String tableComment() {
+ return "Comment";
+ }
+
+ @Override
+ public String columnName() {
+ return "Column";
+ }
+
+ @Override
+ public String columnType() {
+ return "Type";
+ }
+
+ @Override
+ public String columnComment() {
+ return "Comment";
+ }
+}
diff --git a/dinky-metadata/dinky-metadata-trino/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver b/dinky-metadata/dinky-metadata-trino/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver
new file mode 100644
index 0000000000..8709436284
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-trino/src/main/resources/META-INF/services/org.dinky.metadata.driver.Driver
@@ -0,0 +1 @@
+org.dinky.metadata.driver.TrinoDriver
\ No newline at end of file
diff --git a/dinky-metadata/dinky-metadata-trino/src/test/java/org/dinky/TrinoTest.java b/dinky-metadata/dinky-metadata-trino/src/test/java/org/dinky/TrinoTest.java
new file mode 100644
index 0000000000..46b6d6eb2b
--- /dev/null
+++ b/dinky-metadata/dinky-metadata-trino/src/test/java/org/dinky/TrinoTest.java
@@ -0,0 +1,105 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky;
+
+import org.dinky.metadata.config.AbstractJdbcConfig;
+import org.dinky.metadata.config.DriverConfig;
+import org.dinky.metadata.driver.Driver;
+import org.dinky.metadata.result.JdbcSelectResult;
+import org.dinky.data.model.Column;
+import org.dinky.data.model.Schema;
+import org.dinky.data.model.Table;
+
+import java.sql.SQLException;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.UUID;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TrinoTest {
+
+ private Driver driver;
+
+ @Before
+ public void init() {
+ DriverConfig<AbstractJdbcConfig> config = new DriverConfig<>();
+ config.setName(UUID.randomUUID().toString());
+ config.setType("Trino");
+
+ config.setConnectConfig(AbstractJdbcConfig.builder()
+ .ip("192.168.0.84")
+ .port(8981)
+ .username("root")
+ .password("")
+ .url("jdbc:trino://192.168.0.84:8981")
+ .build());
+
+ try {
+ driver = Driver.build(config);
+ } catch (Exception e) {
+ System.err.println("连接创建失败:" + e.getMessage());
+ }
+ }
+
+ @Test
+ public void test() throws SQLException {
+ // test
+ String test = driver.test();
+ System.out.println(test);
+ System.out.println("schema && table...");
+ testSchema();
+ System.out.println("columns...");
+ testColumns();
+ System.out.println("query...");
+ query();
+ }
+
+ @Test
+ public void testSchema() {
+ // schema && table
+ List<Schema> schemasAndTables = driver.getSchemasAndTables();
+ for (Schema schemasAndTable : schemasAndTables) {
+ List tables = schemasAndTable.getTables();
+ for (Table table : tables) {
+ System.out.println(table.getName() + " " + table.getSchema());
+ }
+ }
+ }
+
+ @Test
+ public void testColumns() {
+ // columns
+ List<Column> columns = driver.listColumns("paimon.ads", "ads_login_by_dept_1h");
+ for (Column column : columns) {
+ System.out.println(column.getName() + " " + column.getType() + " " + column.getComment());
+ }
+ }
+
+ @Test
+ public void query() {
+ JdbcSelectResult selectResult = driver.query("select * from paimon.ads.ads_login_by_dept_1h", 10);
+ List