From e5078aead879ee3740194dda65da5dfec80a09c0 Mon Sep 17 00:00:00 2001 From: "Connell, Joseph" Date: Fri, 6 Jun 2025 17:29:04 -0700 Subject: [PATCH 1/2] vscode git ignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index f015f9056..613308396 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,4 @@ node_modules # Local Netlify folder .netlify +.vscode/settings.json From c0b952a8f3335b4379a5ee92cfb6211ba6b4276f Mon Sep 17 00:00:00 2001 From: "Connell, Joseph" Date: Tue, 10 Jun 2025 22:58:05 -0700 Subject: [PATCH 2/2] Base Databricks API and Frontend Integration --- .../lowcoder-design/src/icons/index.tsx | 1 + .../src/icons/v1/icon-query-databricks.svg | 10 ++ .../lowcoder/src/api/datasourceApi.ts | 8 + .../comps/formComp/generate/databricks.tsx | 64 ++++++++ .../comps/comps/formComp/generate/index.tsx | 3 + .../queries/queryComp/queryPropertyView.tsx | 2 +- .../src/constants/datasourceConstants.ts | 1 + .../lowcoder/src/constants/queryConstants.ts | 2 + .../form/databricksDatasourceForm.tsx | 139 ++++++++++++++++ .../form/datasourceFormRegistry.tsx | 2 + .../components/DataSourcesTab.tsx | 1 + .../lowcoder/src/util/bottomResUtils.tsx | 3 + .../impl/DatasourceMetaInfoServiceImpl.java | 8 + .../lowcoder-plugins/databricksPlugin/pom.xml | 111 +++++++++++++ .../databricks/DatabricksConnector.java | 86 ++++++++++ .../plugin/databricks/DatabricksPlugin.java | 11 ++ .../databricks/DatabricksQueryExecutor.java | 86 ++++++++++ .../gui/DatabricksBulkInsertCommand.java | 32 ++++ .../gui/DatabricksBulkUpdateCommand.java | 33 ++++ .../gui/DatabricksDeleteCommand.java | 50 ++++++ .../gui/DatabricksInsertCommand.java | 38 +++++ .../gui/DatabricksUpdateCommand.java | 49 ++++++ .../plugin/databricks/gui/GuiConstants.java | 8 + .../model/DatabricksAuthMechanism.java | 18 ++ .../model/DatabricksConnectionUriParser.java | 155 ++++++++++++++++++ .../model/DatabricksDatasourceConfig.java | 155 ++++++++++++++++++ 
.../model/DatabricksQueryConfig.java | 75 +++++++++ .../util/DatabricksResultParser.java | 43 +++++ .../util/DatabricksStructureParser.java | 50 ++++++ .../src/main/resources/databricks-icon.svg | 10 ++ .../src/main/resources/databricks-plugin.json | 67 ++++++++ .../DatabricksQueryExecutorTest.java | 43 +++++ server/api-service/lowcoder-plugins/pom.xml | 6 + 33 files changed, 1369 insertions(+), 1 deletion(-) create mode 100644 client/packages/lowcoder-design/src/icons/v1/icon-query-databricks.svg create mode 100644 client/packages/lowcoder/src/comps/comps/formComp/generate/databricks.tsx create mode 100644 client/packages/lowcoder/src/pages/datasource/form/databricksDatasourceForm.tsx create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/pom.xml create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksConnector.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksPlugin.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksQueryExecutor.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksBulkInsertCommand.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksBulkUpdateCommand.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksDeleteCommand.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksInsertCommand.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksUpdateCommand.java create mode 100644 
server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/GuiConstants.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/model/DatabricksAuthMechanism.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/model/DatabricksConnectionUriParser.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/model/DatabricksDatasourceConfig.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/model/DatabricksQueryConfig.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/util/DatabricksResultParser.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/util/DatabricksStructureParser.java create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/resources/databricks-icon.svg create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/main/resources/databricks-plugin.json create mode 100644 server/api-service/lowcoder-plugins/databricksPlugin/src/test/java/org/lowcoder/plugin/databricks/DatabricksQueryExecutorTest.java diff --git a/client/packages/lowcoder-design/src/icons/index.tsx b/client/packages/lowcoder-design/src/icons/index.tsx index 94453db48..0eace6d2f 100644 --- a/client/packages/lowcoder-design/src/icons/index.tsx +++ b/client/packages/lowcoder-design/src/icons/index.tsx @@ -144,6 +144,7 @@ export { ReactComponent as MongoIcon } from "./v1/icon-query-MongoDB.svg"; export { ReactComponent as PostgresIcon } from "./v1/icon-query-postgres.svg"; export { ReactComponent as RedisIcon } from "./v1/icon-query-Redis.svg"; export { ReactComponent as MSSQLIcon } from 
"./v1/icon-query-mssql.svg"; +export { ReactComponent as DatabricksIcon } from "./v1/icon-query-databricks.svg"; export { ReactComponent as SMTPIcon } from "./v1/icon-query-SMTP.svg"; export { ReactComponent as OracleIcon } from "./v1/icon-query-OracleDB.svg"; export { ReactComponent as ClickHouseIcon } from "./v1/icon-query-ClickHouse.svg"; diff --git a/client/packages/lowcoder-design/src/icons/v1/icon-query-databricks.svg b/client/packages/lowcoder-design/src/icons/v1/icon-query-databricks.svg new file mode 100644 index 000000000..78ef0bffb --- /dev/null +++ b/client/packages/lowcoder-design/src/icons/v1/icon-query-databricks.svg @@ -0,0 +1,10 @@ + + + + + + \ No newline at end of file diff --git a/client/packages/lowcoder/src/api/datasourceApi.ts b/client/packages/lowcoder/src/api/datasourceApi.ts index 1be29e646..752a7e491 100644 --- a/client/packages/lowcoder/src/api/datasourceApi.ts +++ b/client/packages/lowcoder/src/api/datasourceApi.ts @@ -27,6 +27,13 @@ export interface SQLConfig extends PreparedStatementConfig { usingSsl: boolean; } +export interface DatabricksConfig extends SQLConfig { + authMechanism: string; // 3: Personal Access Token, 11: OAuth + usingUri: boolean; + jdbcUri: string; + httpPath: string; + catalog: string; +} export interface MongoConfig extends SQLConfig { uri: string; usingUri: boolean; @@ -123,6 +130,7 @@ export type DatasourceConfigType = | SQLConfig | HttpConfig | MongoConfig + | DatabricksConfig | OAuthBasicConfig | EsConfig | OracleConfig diff --git a/client/packages/lowcoder/src/comps/comps/formComp/generate/databricks.tsx b/client/packages/lowcoder/src/comps/comps/formComp/generate/databricks.tsx new file mode 100644 index 000000000..3cab3b2bd --- /dev/null +++ b/client/packages/lowcoder/src/comps/comps/formComp/generate/databricks.tsx @@ -0,0 +1,64 @@ +import { + DataSourceTypeConfig, + FullColumnInfo, + generateInsertSql, + QueryDataType, +} from "./dataSourceCommon"; +import { + CompSelection, + dateCompSelection, + 
allCompSelection, + numberCompSelection, + timeCompSelection, + dateTimeCompSelection, +} from "./comp"; + +function getCompSelection(columnType: string): CompSelection | undefined { + if (!columnType) { + return undefined; + } + switch (columnType.toLowerCase()) { + case "bit": + case "tinyint": + case "smallint": + case "int": + case "bigint": + case "dec": + case "decimal": + case "numeric": + case "smallmoney": + case "money": + case "float": + case "real": + return numberCompSelection(true); + case "date": + return dateCompSelection(); + case "datetime": + case "smalldatetime": + return dateTimeCompSelection(); + case "time": + return timeCompSelection(); + } + return allCompSelection(); +} + +function getQueryInitData( + formName: string, + tableName: string, + infos: FullColumnInfo[] +): QueryDataType { + return { + compType: "databricks", + comp: { + sql: generateInsertSql(tableName, infos), + commandType: "INSERT", + mode: "SQL", + }, + }; +} + +export const databricksConfig: DataSourceTypeConfig = { + type: "databricks", + getCompSelection, + getQueryInitData, +}; diff --git a/client/packages/lowcoder/src/comps/comps/formComp/generate/index.tsx b/client/packages/lowcoder/src/comps/comps/formComp/generate/index.tsx index 7fdffe2b7..1ec29e8f6 100644 --- a/client/packages/lowcoder/src/comps/comps/formComp/generate/index.tsx +++ b/client/packages/lowcoder/src/comps/comps/formComp/generate/index.tsx @@ -3,6 +3,7 @@ import { msSqlConfig } from "./mssql"; import { mysqlConfig } from "./mysql"; import { oracleSqlConfig } from "./oracle"; import { postgreSqlConfig } from "./postgresql"; +import { databricksConfig } from "./databricks"; import { DatasourceType } from "@lowcoder-ee/constants/queryConstants"; export function getDataSourceTypeConfig( @@ -14,6 +15,8 @@ export function getDataSourceTypeConfig( return mysqlConfig; case "postgres": return postgreSqlConfig; + case "databricks": + return databricksConfig; case "mssql": return msSqlConfig; case "oracle": 
diff --git a/client/packages/lowcoder/src/comps/queries/queryComp/queryPropertyView.tsx b/client/packages/lowcoder/src/comps/queries/queryComp/queryPropertyView.tsx index d78f7d6ab..bf183626f 100644 --- a/client/packages/lowcoder/src/comps/queries/queryComp/queryPropertyView.tsx +++ b/client/packages/lowcoder/src/comps/queries/queryComp/queryPropertyView.tsx @@ -444,7 +444,7 @@ export const QueryGeneralPropertyView = (props: {
- {["postgres", "mysql", "mssql", "oracle", "mariadb"].includes(datasourceType) && ( + {["databricks", "postgres", "mysql", "mssql", "oracle", "mariadb"].includes(datasourceType) && ( [] = [ "mysql", "mongodb", "postgres", + "databricks", "redis", "es", "mssql", diff --git a/client/packages/lowcoder/src/constants/queryConstants.ts b/client/packages/lowcoder/src/constants/queryConstants.ts index be78de0d6..900c7db67 100644 --- a/client/packages/lowcoder/src/constants/queryConstants.ts +++ b/client/packages/lowcoder/src/constants/queryConstants.ts @@ -24,6 +24,7 @@ export type DatasourceType = | "redis" | "es" | "mssql" + | "databricks" | "smtp" | "oracle" | "clickHouse" @@ -46,6 +47,7 @@ export const QueryMap = { redis: RedisQuery, es: EsQuery, mssql: SQLQuery, + databricks: SQLQuery, smtp: SMTPQuery, oracle: SQLQuery, clickHouse: SQLQuery, diff --git a/client/packages/lowcoder/src/pages/datasource/form/databricksDatasourceForm.tsx b/client/packages/lowcoder/src/pages/datasource/form/databricksDatasourceForm.tsx new file mode 100644 index 000000000..c9ae59494 --- /dev/null +++ b/client/packages/lowcoder/src/pages/datasource/form/databricksDatasourceForm.tsx @@ -0,0 +1,139 @@ +import React, { useState, useEffect } from "react"; +import { DatasourceForm, FormInputItem, FormSection, FormSelectItem, FormInputPasswordItem, FormNumberInputItem, FormCheckboxItem } from "lowcoder-design"; +import { DatabricksConfig } from "api/datasourceApi"; +import { DatasourceFormProps } from "./datasourceFormRegistry"; +import { useHostCheck } from "./useHostCheck"; +import { trans } from "i18n"; +import { + DatabaseFormInputItem, + DatasourceNameFormInputItem, + encryptedPlaceholder, + HostFormInputItem, + PasswordFormInputItem, + PortFormInputItem, + SSLFormCheckboxItem, + // UserNameFormInputItem, // removed +} from "../form"; + +export const DatabricksDatasourceForm = (props: DatasourceFormProps) => { + const { form, datasource, size } = props; + const datasourceConfig = 
datasource?.datasourceConfig as DatabricksConfig; + const [usingUri, setUsingUri] = useState(datasourceConfig?.usingUri); + const hostRule = useHostCheck(); + + // Set username to "token" if Auth Mechanism is PAT (3) + useEffect(() => { + if (!usingUri && form.getFieldValue("authMechanism") === "3") { + form.setFieldsValue({ username: "token" }); + } + }, [usingUri, form.getFieldValue("authMechanism"), form]); + + return ( + + + + + + + setUsingUri(value === "true")} + /> + + + + {usingUri ? ( + + ) : ( + <> + + + + + + + + + + + + + + + + + + )} + + + ); +}; diff --git a/client/packages/lowcoder/src/pages/datasource/form/datasourceFormRegistry.tsx b/client/packages/lowcoder/src/pages/datasource/form/datasourceFormRegistry.tsx index 9c2abb612..af2ceb3b3 100644 --- a/client/packages/lowcoder/src/pages/datasource/form/datasourceFormRegistry.tsx +++ b/client/packages/lowcoder/src/pages/datasource/form/datasourceFormRegistry.tsx @@ -9,6 +9,7 @@ import { GoogleSheetsDatasourceForm } from "./googleSheetsDatasourceForm"; import { DatasourceType } from "@lowcoder-ee/constants/queryConstants"; import { Datasource } from "@lowcoder-ee/constants/datasourceConstants"; import { sqlDatasourceForm } from "./sqlDatasourceForm"; +import { DatabricksDatasourceForm } from "./databricksDatasourceForm"; import { GraphqlDatasourceForm } from "./graphqlDatasourceForm"; import { OracleDatasourceForm } from "./oracleDatasourceForm"; import { DataSourceTypeInfo } from "api/datasourceApi"; @@ -55,4 +56,5 @@ export const DatasourceFormRegistry: Partial = ({ environment, workspaceI 'elasticsearch': '#005571', 'oracle': '#F80000', 'mssql': '#CC2927', + 'databricks': '#F55322', 'snowflake': '#29B5E8' }; diff --git a/client/packages/lowcoder/src/util/bottomResUtils.tsx b/client/packages/lowcoder/src/util/bottomResUtils.tsx index b2f2baf42..e24bffa66 100644 --- a/client/packages/lowcoder/src/util/bottomResUtils.tsx +++ b/client/packages/lowcoder/src/util/bottomResUtils.tsx @@ -14,6 +14,7 @@ import 
{ MariaDBIcon, MongoIcon, MSSQLIcon, + DatabricksIcon, MysqlIcon, OptionsApiIcon, OracleIcon, @@ -124,6 +125,8 @@ export const getBottomResIcon = ( return ; case "mssql": return ; + case "databricks": + return ; case "smtp": return ; case "oracle": diff --git a/server/api-service/lowcoder-domain/src/main/java/org/lowcoder/domain/plugin/service/impl/DatasourceMetaInfoServiceImpl.java b/server/api-service/lowcoder-domain/src/main/java/org/lowcoder/domain/plugin/service/impl/DatasourceMetaInfoServiceImpl.java index 979ae6b4e..7c0d79e52 100644 --- a/server/api-service/lowcoder-domain/src/main/java/org/lowcoder/domain/plugin/service/impl/DatasourceMetaInfoServiceImpl.java +++ b/server/api-service/lowcoder-domain/src/main/java/org/lowcoder/domain/plugin/service/impl/DatasourceMetaInfoServiceImpl.java @@ -93,6 +93,14 @@ public class DatasourceMetaInfoServiceImpl implements DatasourceMetaInfoService .connectionPool(ClientBasedConnectionPool.class) .build(); + private static final DatasourceMetaInfo DATABRICKS = DatasourceMetaInfo.builder() + .type("databricks") + .displayName("Databricks") + .pluginExecutorKey("databricks-plugin") + .hasStructureInfo(true) + .connectionPool(ClientBasedConnectionPool.class) + .build(); + private static final DatasourceMetaInfo ORACLE = DatasourceMetaInfo.builder() .type("oracle") .displayName("Oracle") diff --git a/server/api-service/lowcoder-plugins/databricksPlugin/pom.xml b/server/api-service/lowcoder-plugins/databricksPlugin/pom.xml new file mode 100644 index 000000000..633bdd6ab --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/pom.xml @@ -0,0 +1,111 @@ + + + + + org.lowcoder + lowcoder-plugins + ${revision} + + + 4.0.0 + org.lowcoder.plugins + databricksPlugin + jar + + databricksPlugin + + + UTF-8 + 17 + ${java.version} + ${java.version} + databricks-plugin + org.lowcoder.plugin.databricks.DatabricksPlugin + ${revision} + service@lowcoder.org + + + + + + org.lowcoder + sqlBasedPlugin + compile + + + 
com.databricks + databricks-jdbc + 2.6.36 + + + org.projectlombok + lombok + provided + + + org.junit.jupiter + junit-jupiter-api + 5.8.2 + test + + + org.mockito + mockito-core + 4.5.1 + test + + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.2.4 + + + shade-plugin-jar + package + + shade + + + false + + + + ${plugin.id} + ${plugin.class} + ${plugin.version} + ${plugin.provider} + + + + + + + + + maven-dependency-plugin + + + copy-dependencies + package + + copy-dependencies + + + runtime + ${project.build.directory}/lib + + + + + + maven-antrun-plugin + + + + \ No newline at end of file diff --git a/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksConnector.java b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksConnector.java new file mode 100644 index 000000000..627cbbbbd --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksConnector.java @@ -0,0 +1,86 @@ +package org.lowcoder.plugin.databricks; + +import com.zaxxer.hikari.HikariConfig; +import org.lowcoder.plugin.databricks.model.*; +import org.apache.commons.lang3.StringUtils; + +import java.util.Set; +import java.util.HashSet; + +public class DatabricksConnector { + + private static final String JDBC_DRIVER = "com.databricks.client.jdbc.Driver"; + + public DatabricksConnector() { + super(); + } + + protected String getJdbcDriver() { + return JDBC_DRIVER; + } + + protected void setUpConfigs(DatabricksDatasourceConfig databricksDatasourceConfig, HikariConfig config) { + + config.setDriverClassName(JDBC_DRIVER); + + String catalog = databricksDatasourceConfig.getCatalog(); + String schema = databricksDatasourceConfig.getDatabase(); + String httpPath = databricksDatasourceConfig.getHttpPath(); + String password = databricksDatasourceConfig.getPassword(); + Boolean usingSsl = 
databricksDatasourceConfig.isUsingSsl(); + DatabricksAuthMechanism authMechanism = databricksDatasourceConfig.getAuthMechanism(); + String username = databricksDatasourceConfig.getUsername(); + + // Build JDBC URL with schema in path if provided + String url = buildJdbcUrl(databricksDatasourceConfig); + + config.setJdbcUrl(url); + config.addDataSourceProperty("HttpPath", httpPath); + config.addDataSourceProperty("AuthMech", authMechanism.getValue()); + config.addDataSourceProperty("UID", username); + config.addDataSourceProperty("PWD", password); + if (catalog != null && !catalog.isEmpty()) { + config.addDataSourceProperty("ConnCatalog", catalog); + } + if (schema != null && !schema.isEmpty()) { + config.addDataSourceProperty("ConnSchema", schema); + } + + if (usingSsl != null && usingSsl) { + config.addDataSourceProperty("ssl", "true"); + config.addDataSourceProperty("sslmode", "require"); + } else { + config.addDataSourceProperty("ssl", "false"); + config.addDataSourceProperty("sslmode", "disable"); + } + + // Readonly is optional, set to false by default + config.setReadOnly(false); + } + + public Set validateConfig(DatabricksDatasourceConfig connectionConfig) { + Set validates = new HashSet<>(); + if (StringUtils.isBlank(connectionConfig.getHost())) { + validates.add("INVALID_HOST_CONFIG"); + } + // Optionally validate other required fields here + return validates; + } + + private String buildJdbcUrl(DatabricksDatasourceConfig config) { + String url; + if (config.isUsingUri()) { + if (StringUtils.isBlank(config.getUri())) { + throw new IllegalArgumentException("JDBC URI must be provided when usingUri is true"); + } + url = config.getUri(); + } else { + if (config.getSchema() != null && !config.getSchema().isEmpty()) { + url = String.format("jdbc:databricks://%s:%d/%s", config.getHost(), config.getPort(), config.getSchema()); + } else { + url = String.format("jdbc:databricks://%s:%d", config.getHost(), config.getPort()); + } + } + return url; + } +} diff --git 
a/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksPlugin.java b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksPlugin.java new file mode 100644 index 000000000..cfa49c5c8 --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksPlugin.java @@ -0,0 +1,11 @@ +package org.lowcoder.plugin.databricks; + +import org.pf4j.Plugin; +import org.pf4j.PluginWrapper; + +public class DatabricksPlugin extends Plugin { + + public DatabricksPlugin(PluginWrapper wrapper) { + super(wrapper); + } +} diff --git a/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksQueryExecutor.java b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksQueryExecutor.java new file mode 100644 index 000000000..34895fd9a --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/DatabricksQueryExecutor.java @@ -0,0 +1,86 @@ +package org.lowcoder.plugin.databricks; + +import static org.lowcoder.plugin.databricks.util.DatabricksStructureParser.parseTableAndColumns; +import static org.lowcoder.sdk.exception.PluginCommonError.DATASOURCE_GET_STRUCTURE_ERROR; +import static org.lowcoder.sdk.exception.PluginCommonError.QUERY_ARGUMENT_ERROR; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.lowcoder.plugin.databricks.gui.DatabricksDeleteCommand; +import org.lowcoder.plugin.databricks.gui.DatabricksInsertCommand; +import org.lowcoder.plugin.databricks.gui.DatabricksUpdateCommand; +import 
org.lowcoder.plugin.databricks.gui.DatabricksBulkInsertCommand; +import org.lowcoder.plugin.databricks.gui.DatabricksBulkUpdateCommand; +import org.lowcoder.plugin.databricks.util.DatabricksResultParser; +import org.lowcoder.plugin.sql.GeneralSqlExecutor; +import org.lowcoder.plugin.sql.SqlBasedQueryExecutor; +import org.lowcoder.sdk.exception.PluginException; +import org.lowcoder.sdk.models.DatasourceStructure; +import org.lowcoder.sdk.models.DatasourceStructure.Table; +import org.lowcoder.sdk.plugin.common.sql.SqlBasedDatasourceConnectionConfig; +import org.lowcoder.sdk.plugin.sqlcommand.GuiSqlCommand; +import org.pf4j.Extension; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Extension +public class DatabricksQueryExecutor extends SqlBasedQueryExecutor { + + // PF4J requires a public no-arg constructor + public DatabricksQueryExecutor() { + super(new GeneralSqlExecutor() { + @Override + protected List> parseDataRows(ResultSet resultSet) throws SQLException { + ResultSetMetaData metaData = resultSet.getMetaData(); + int columnCount = metaData.getColumnCount(); + List> result = new ArrayList<>(); + while (resultSet.next()) { + Map row = DatabricksResultParser.parseRowValue(resultSet, metaData, columnCount); + result.add(row); + } + return result; + } + // Prepared statement support is handled by the base class using the query config's disablePreparedStatement flag. 
+ }); + } + + @Override + protected DatasourceStructure getDatabaseMetadata(Connection connection, SqlBasedDatasourceConnectionConfig connectionConfig) { + Map tablesByName = new LinkedHashMap<>(); + try (Statement statement = connection.createStatement()) { + parseTableAndColumns(tablesByName, statement); + } catch (SQLException throwable) { + throw new PluginException(DATASOURCE_GET_STRUCTURE_ERROR, "DATASOURCE_GET_STRUCTURE_ERROR", throwable.getMessage()); + } + + DatasourceStructure structure = new DatasourceStructure(new ArrayList<>(tablesByName.values())); + for (Table table : structure.getTables()) { + table.getKeys().sort(Comparator.naturalOrder()); + } + return structure; + } + + @Override + protected GuiSqlCommand parseSqlCommand(String guiStatementType, Map detail) { + // Use new field names and allow for future extensibility + String type = guiStatementType != null ? guiStatementType : ""; + return switch (type.toUpperCase()) { + case "INSERT" -> DatabricksInsertCommand.from(detail); + case "UPDATE" -> DatabricksUpdateCommand.from(detail); + case "DELETE" -> DatabricksDeleteCommand.from(detail); + case "BULK_INSERT" -> DatabricksBulkInsertCommand.from(detail); + case "BULK_UPDATE" -> DatabricksBulkUpdateCommand.from(detail); + default -> throw new PluginException(QUERY_ARGUMENT_ERROR, "INVALID_GUI_COMMAND_TYPE", type); + }; + } +} diff --git a/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksBulkInsertCommand.java b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksBulkInsertCommand.java new file mode 100644 index 000000000..ee01fac36 --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksBulkInsertCommand.java @@ -0,0 +1,32 @@ +package org.lowcoder.plugin.databricks.gui; + +import static 
org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_BACK; +import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_FRONT; +import static org.lowcoder.sdk.exception.PluginCommonError.QUERY_ARGUMENT_ERROR; + +import org.lowcoder.sdk.exception.PluginException; +import org.lowcoder.sdk.plugin.sqlcommand.GuiSqlCommand; +import org.lowcoder.sdk.plugin.sqlcommand.changeset.BulkObjectChangeSet; +import org.lowcoder.sdk.plugin.sqlcommand.command.BulkInsertCommand; + +import java.util.Map; + +public class DatabricksBulkInsertCommand extends BulkInsertCommand { + + protected DatabricksBulkInsertCommand(String table, BulkObjectChangeSet bulkObjectChangeSet) { + super(table, bulkObjectChangeSet, DATABRICKS_COLUMN_DELIMITER_FRONT, DATABRICKS_COLUMN_DELIMITER_BACK); + } + + public static BulkInsertCommand from(Map commandDetail) { + if (commandDetail == null || commandDetail.isEmpty()) { + throw new PluginException(QUERY_ARGUMENT_ERROR, "BULK_INSERT_COMMAND_DETAIL_EMPTY"); + } + String table = GuiSqlCommand.parseTable(commandDetail); + String recordStr = BulkObjectChangeSet.parseBulkRecords(commandDetail); + if (table == null || table.isEmpty() || recordStr == null || recordStr.isEmpty()) { + throw new PluginException(QUERY_ARGUMENT_ERROR, "BULK_INSERT_COMMAND_MISSING_FIELDS"); + } + BulkObjectChangeSet bulkObjectChangeSet = new BulkObjectChangeSet(recordStr); + return new DatabricksBulkInsertCommand(table, bulkObjectChangeSet); + } +} diff --git a/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksBulkUpdateCommand.java b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksBulkUpdateCommand.java new file mode 100644 index 000000000..6e66660ab --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksBulkUpdateCommand.java @@ -0,0 
+1,33 @@ +package org.lowcoder.plugin.databricks.gui; + +import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_FRONT; +import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_BACK; +import static org.lowcoder.sdk.exception.PluginCommonError.QUERY_ARGUMENT_ERROR; + +import org.lowcoder.sdk.exception.PluginException; +import org.lowcoder.sdk.plugin.sqlcommand.GuiSqlCommand; +import org.lowcoder.sdk.plugin.sqlcommand.changeset.BulkObjectChangeSet; +import org.lowcoder.sdk.plugin.sqlcommand.command.BulkUpdateCommand; + +import java.util.Map; + +public class DatabricksBulkUpdateCommand extends BulkUpdateCommand { + + protected DatabricksBulkUpdateCommand(String table, BulkObjectChangeSet bulkObjectChangeSet, String primaryKey) { + super(table, bulkObjectChangeSet, primaryKey, DATABRICKS_COLUMN_DELIMITER_FRONT, DATABRICKS_COLUMN_DELIMITER_BACK); + } + + public static DatabricksBulkUpdateCommand from(Map commandDetail) { + if (commandDetail == null || commandDetail.isEmpty()) { + throw new PluginException(QUERY_ARGUMENT_ERROR, "BULK_UPDATE_COMMAND_DETAIL_EMPTY"); + } + String table = GuiSqlCommand.parseTable(commandDetail); + String recordStr = BulkObjectChangeSet.parseBulkRecords(commandDetail); + String primaryKey = BulkObjectChangeSet.parsePrimaryKey(commandDetail); + if (table == null || table.isEmpty() || recordStr == null || recordStr.isEmpty() || primaryKey == null || primaryKey.isEmpty()) { + throw new PluginException(QUERY_ARGUMENT_ERROR, "BULK_UPDATE_COMMAND_MISSING_FIELDS"); + } + BulkObjectChangeSet bulkObjectChangeSet = new BulkObjectChangeSet(recordStr); + return new DatabricksBulkUpdateCommand(table, bulkObjectChangeSet, primaryKey); + } +} diff --git a/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksDeleteCommand.java 
b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksDeleteCommand.java new file mode 100644 index 000000000..a918fc6b5 --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksDeleteCommand.java @@ -0,0 +1,50 @@ +package org.lowcoder.plugin.databricks.gui; + +import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_BACK; +import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_FRONT; +import static org.lowcoder.sdk.plugin.sqlcommand.filter.FilterSet.parseFilterSet; +import static org.lowcoder.sdk.exception.PluginCommonError.QUERY_ARGUMENT_ERROR; + +import java.util.Map; + +import org.lowcoder.sdk.exception.PluginException; +import org.lowcoder.sdk.plugin.sqlcommand.GuiSqlCommand; +import org.lowcoder.sdk.plugin.sqlcommand.command.DeleteCommand; +import org.lowcoder.sdk.plugin.sqlcommand.filter.FilterSet; + +public class DatabricksDeleteCommand extends DeleteCommand { + + protected DatabricksDeleteCommand(String table, FilterSet filterSet, boolean allowMultiModify) { + super(table, filterSet, allowMultiModify, DATABRICKS_COLUMN_DELIMITER_FRONT, DATABRICKS_COLUMN_DELIMITER_BACK); + } + + public static DeleteCommand from(Map commandDetail) { + if (commandDetail == null || commandDetail.isEmpty()) { + throw new PluginException(QUERY_ARGUMENT_ERROR, "DELETE_COMMAND_DETAIL_EMPTY"); + } + String table = GuiSqlCommand.parseTable(commandDetail); + FilterSet filterSet = parseFilterSet(commandDetail); + boolean allowMultiModify = GuiSqlCommand.parseAllowMultiModify(commandDetail); + if (table == null || table.isEmpty()) { + throw new PluginException(QUERY_ARGUMENT_ERROR, "DELETE_COMMAND_MISSING_TABLE"); + } + return new DatabricksDeleteCommand(table, filterSet, allowMultiModify); + } + + @Override + public GuiSqlCommandRenderResult render(Map requestMap) { + return 
super.render(requestMap); + } + + @Override + protected void renderTable(String renderedTable, StringBuilder sb) { + sb.append("delete from ").append(renderedTable); + } + + @Override + protected void renderLimit(StringBuilder sb) { + if (allowMultiModify) { + sb.append(" limit 1"); + } + } +} diff --git a/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksInsertCommand.java b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksInsertCommand.java new file mode 100644 index 000000000..c67a305ec --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/src/main/java/org/lowcoder/plugin/databricks/gui/DatabricksInsertCommand.java @@ -0,0 +1,38 @@ +package org.lowcoder.plugin.databricks.gui; + +import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_BACK; +import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_FRONT; +import static org.lowcoder.sdk.exception.PluginCommonError.QUERY_ARGUMENT_ERROR; + +import java.util.Map; + +import org.lowcoder.sdk.exception.PluginException; +import org.lowcoder.sdk.plugin.sqlcommand.changeset.ChangeSet; +import org.lowcoder.sdk.plugin.sqlcommand.command.InsertCommand; + +import com.google.common.annotations.VisibleForTesting; + +public class DatabricksInsertCommand extends InsertCommand { + + private DatabricksInsertCommand(Map commandDetail) { + super(commandDetail, DATABRICKS_COLUMN_DELIMITER_FRONT, DATABRICKS_COLUMN_DELIMITER_BACK); + } + + @VisibleForTesting + protected DatabricksInsertCommand(String table, ChangeSet changeSet) { + super(table, changeSet, DATABRICKS_COLUMN_DELIMITER_FRONT, DATABRICKS_COLUMN_DELIMITER_BACK); + } + + public static DatabricksInsertCommand from(Map detail) { + if (detail == null || detail.isEmpty()) { + throw new PluginException(QUERY_ARGUMENT_ERROR, "INSERT_COMMAND_DETAIL_EMPTY"); + } + // 
package org.lowcoder.plugin.databricks.gui;

import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_BACK;
import static org.lowcoder.plugin.databricks.gui.GuiConstants.DATABRICKS_COLUMN_DELIMITER_FRONT;
import static org.lowcoder.sdk.exception.PluginCommonError.QUERY_ARGUMENT_ERROR;

import java.util.Map;

import org.lowcoder.sdk.exception.PluginException;
import org.lowcoder.sdk.plugin.sqlcommand.changeset.ChangeSet;
import org.lowcoder.sdk.plugin.sqlcommand.command.UpdateCommand;
import org.lowcoder.sdk.plugin.sqlcommand.filter.FilterSet;

import com.google.common.annotations.VisibleForTesting;

/**
 * GUI-mode UPDATE command for Databricks.
 *
 * <p>Renders {@code update `table` set ... where ...} using back-tick column
 * delimiters (see {@link GuiConstants}).
 */
public class DatabricksUpdateCommand extends UpdateCommand {

    private DatabricksUpdateCommand(Map<String, Object> commandDetail) {
        super(commandDetail, DATABRICKS_COLUMN_DELIMITER_FRONT, DATABRICKS_COLUMN_DELIMITER_BACK);
    }

    @VisibleForTesting
    protected DatabricksUpdateCommand(String table, ChangeSet changeSet, FilterSet filterSet, boolean allowMultiModify) {
        super(table, changeSet, filterSet, allowMultiModify, DATABRICKS_COLUMN_DELIMITER_FRONT, DATABRICKS_COLUMN_DELIMITER_BACK);
    }

    @Override
    protected void appendTable(String renderedTable, StringBuilder sb) {
        sb.append("update ").append(renderedTable);
    }

    @Override
    protected void appendLimit(StringBuilder sb) {
        // BUG FIX: guard was inverted — "limit 1" must clamp the statement when
        // multi-row modification is NOT allowed, not when it is.
        // NOTE(review): Databricks SQL may not accept LIMIT on UPDATE at all —
        // confirm against the target runtime.
        if (!allowMultiModify) {
            sb.append(" limit 1");
        }
    }

    /**
     * Builds the command from the raw GUI request payload.
     *
     * @param commandDetail request map; must contain "table" and "set" entries
     * @return a ready-to-render update command
     * @throws PluginException if the payload is empty or missing required fields
     */
    public static DatabricksUpdateCommand from(Map<String, Object> commandDetail) {
        if (commandDetail == null || commandDetail.isEmpty()) {
            throw new PluginException(QUERY_ARGUMENT_ERROR, "UPDATE_COMMAND_DETAIL_EMPTY");
        }
        if (!commandDetail.containsKey("table") || !commandDetail.containsKey("set")) {
            throw new PluginException(QUERY_ARGUMENT_ERROR, "UPDATE_COMMAND_MISSING_FIELDS");
        }
        return new DatabricksUpdateCommand(commandDetail);
    }
}
/**
 * Authentication mechanisms supported by the Databricks JDBC driver, keyed by
 * the driver's numeric {@code AuthMech} connection property.
 *
 * <p>Declaration order is part of the public contract ({@code values()} /
 * {@code ordinal()}) and must not change.
 */
public enum DatabricksAuthMechanism {
    /** Personal access token authentication ({@code AuthMech=3}). */
    PAT_TOKEN(3),
    /** OAuth 2.0 authentication ({@code AuthMech=11}). */
    OAUTH(11),
    /** Fallback when no mechanism is configured; same wire value as PAT_TOKEN. */
    DEFAULT(3);

    private final int value;

    DatabricksAuthMechanism(int value) {
        this.value = value;
    }

    /** @return the numeric {@code AuthMech} value to pass to the JDBC driver. */
    public int getValue() {
        return value;
    }
}
package org.lowcoder.plugin.databricks.model;

import static org.lowcoder.sdk.exception.PluginCommonError.DATASOURCE_ARGUMENT_ERROR;
import static org.lowcoder.sdk.util.ExceptionUtils.ofPluginException;

import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;

/**
 * Parses Databricks JDBC connection URIs of the form
 * {@code jdbc:databricks://host:port;httpPath=...;AuthMech=N;Key=Value;...}.
 *
 * <p>BUG FIX: the capture groups in the original regex had lost their names
 * (they read {@code (?...)} instead of {@code (?<name>...)}), which makes the
 * pattern fail to compile and every {@code matcher.group("...")} call below
 * impossible. The named groups are restored here.
 */
public final class DatabricksConnectionUriParser {

    private DatabricksConnectionUriParser() {
        // static utility class — no instances
    }

    /*
     * Full Databricks JDBC URL, with named capture groups:
     *   hostname       — server hostname
     *   port           — 2-5 digit port number
     *   httpPath       — warehouse path (sql/1.0/warehouses/... or sql/protocolv1/o/...)
     *   authMech       — numeric AuthMech value
     *   optionalParams — trailing ";Key=Value" pairs, captured as one block
     */
    public static final String DATABRICKS_URL_REGEX =
            "^jdbc:databricks:\\/\\/(?<hostname>[a-zA-Z0-9\\.-]+):(?<port>[0-9]{2,5});httpPath="
                    + "(?<httpPath>(?:sql\\/1\\.0\\/warehouses\\/[a-zA-Z0-9]+|sql\\/protocolv1\\/o\\/[0-9]+\\/[0-9]{4}-[0-9]{6}-[a-zA-Z0-9]+))"
                    + ";AuthMech=(?<authMech>[0-9]+)(?<optionalParams>(?:;[a-zA-Z0-9_]+=[^;]+)*);?$";

    /* Individual Key=Value pairs inside the optionalParams block. */
    public static final String DATABRICKS_OPTIONAL_PARAMS_REGEX = "(?<key>[a-zA-Z0-9_]+)=(?<value>[^;]+)";

    private static final Pattern URL_PATTERN = Pattern.compile(DATABRICKS_URL_REGEX);
    private static final Pattern PARAM_PATTERN = Pattern.compile(DATABRICKS_OPTIONAL_PARAMS_REGEX);

    // Keys for the extracted information map
    public static final String KEY_HOSTNAME = "hostname";
    public static final String KEY_PORT = "port";
    public static final String KEY_HTTP_PATH = "httpPath";
    public static final String KEY_AUTH_MECH = "authMech";
    /** Holds a nested Map&lt;String, String&gt; of the optional parameters. */
    public static final String KEY_OPTIONAL_PARAMS = "optionalParams";

    // Well-known optional parameters
    public static final String OPT_KEY_CONN_CATALOG = "ConnCatalog";
    public static final String OPT_KEY_CONN_SCHEMA = "ConnSchema";
    public static final String OPT_KEY_UID = "UID"; // typically "token"
    public static final String OPT_KEY_PWD = "PWD"; // personal access token

    /** @return true when the whole URI matches the Databricks JDBC URL grammar. */
    public static boolean isValid(String uri) {
        return uri != null && URL_PATTERN.matcher(uri).matches();
    }

    /**
     * Extracts hostname, port, httpPath, authMech and optional parameters.
     *
     * @param uri the JDBC URI; may be null
     * @return map keyed by the KEY_* constants, or null when the URI is invalid.
     *         The KEY_OPTIONAL_PARAMS entry is a nested Map&lt;String, String&gt;.
     */
    public static Map<String, Object> extractInfoFromConnectionStringURI(String uri) {
        if (uri == null) {
            return null;
        }
        // Single matches() pass replaces the original isValid() + find() double scan.
        Matcher matcher = URL_PATTERN.matcher(uri);
        if (!matcher.matches()) {
            return null;
        }

        Map<String, Object> extractedInfoMap = new HashMap<>();
        extractedInfoMap.put(KEY_HOSTNAME, matcher.group("hostname"));
        extractedInfoMap.put(KEY_PORT, matcher.group("port"));
        extractedInfoMap.put(KEY_HTTP_PATH, matcher.group("httpPath"));
        extractedInfoMap.put(KEY_AUTH_MECH, matcher.group("authMech"));

        // Parse the trailing ";Key=Value" block into a nested map.
        Map<String, String> parsedOptionalParams = new HashMap<>();
        String optionalParamsRaw = matcher.group("optionalParams");
        if (StringUtils.isNotBlank(optionalParamsRaw)) {
            Matcher paramMatcher = PARAM_PATTERN.matcher(optionalParamsRaw);
            while (paramMatcher.find()) {
                parsedOptionalParams.put(paramMatcher.group("key"), paramMatcher.group("value"));
            }
        }
        extractedInfoMap.put(KEY_OPTIONAL_PARAMS, parsedOptionalParams);
        return extractedInfoMap;
    }

    /**
     * Parses the default database (schema) from the URI's ConnSchema parameter.
     *
     * @throws org.lowcoder.sdk.exception.PluginException if the URI is invalid
     *         or ConnSchema is absent/blank
     */
    public static String parseDatabaseFrom(String uri) {
        String database = optionalParam(uri, OPT_KEY_CONN_SCHEMA);
        if (StringUtils.isBlank(database)) {
            throw ofPluginException(DATASOURCE_ARGUMENT_ERROR, "DATABRICKS_DATABASE_EMPTY");
        }
        return database;
    }

    /**
     * @return the ConnCatalog value, or null — the catalog is optional for Databricks.
     * @throws org.lowcoder.sdk.exception.PluginException if the URI is invalid
     */
    public static String parseCatalogFrom(String uri) {
        return optionalParam(uri, OPT_KEY_CONN_CATALOG);
    }

    /**
     * @return the PWD value (the personal access token), or null when absent
     *         or when another auth mechanism is used.
     * @throws org.lowcoder.sdk.exception.PluginException if the URI is invalid
     */
    public static String parseAuthTokenFrom(String uri) {
        return optionalParam(uri, OPT_KEY_PWD);
    }

    /** Shared lookup of one optional parameter; throws on an invalid URI. */
    private static String optionalParam(String uri, String key) {
        Map<String, Object> extractedInfo = extractInfoFromConnectionStringURI(uri);
        if (extractedInfo == null) {
            throw ofPluginException(DATASOURCE_ARGUMENT_ERROR, "INVALID_DATABRICKS_URL");
        }
        @SuppressWarnings("unchecked") // nested map structure is controlled above
        Map<String, String> optionalParams = (Map<String, String>) extractedInfo.get(KEY_OPTIONAL_PARAMS);
        return optionalParams == null ? null : optionalParams.get(key);
    }
}
package org.lowcoder.plugin.databricks.model;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonView;
import lombok.Builder;
import lombok.Getter;
import lombok.extern.jackson.Jacksonized;
import org.apache.commons.lang3.StringUtils;
import org.lowcoder.sdk.config.JsonViews;
import org.lowcoder.sdk.exception.PluginCommonError;
import org.lowcoder.sdk.models.DatasourceConnectionConfig;

import java.util.Map;
import java.util.function.Function;

import static org.apache.commons.lang3.ObjectUtils.firstNonNull;
import static org.lowcoder.plugin.databricks.model.DatabricksConnectionUriParser.parseDatabaseFrom;
import static org.lowcoder.sdk.exception.BizError.INVALID_DATASOURCE_CONFIG_TYPE;
import static org.lowcoder.sdk.util.ExceptionUtils.ofException;
import static org.lowcoder.sdk.util.ExceptionUtils.ofPluginException;
import static org.lowcoder.sdk.util.JsonUtils.fromJson;
import static org.lowcoder.sdk.util.JsonUtils.toJson;

/**
 * Connection configuration for the Databricks datasource.
 *
 * <p>Supports two modes: a raw JDBC URI ({@code usingUri == true}, only
 * {@code jdbcUri} + {@code password} are relevant) or discrete fields
 * (host/port/httpPath/catalog/credentials). Secrets ({@code password},
 * {@code jdbcUri}) are serialized only under the Internal JSON view and are
 * encrypted/decrypted in place via {@link #doEncrypt}/{@link #doDecrypt}.
 */
@Getter
@Builder
@Jacksonized
public class DatabricksDatasourceConfig implements DatasourceConnectionConfig {
    private final String database;
    private final String username;

    // Secret: only serialized under the Internal view.
    @JsonView(JsonViews.Internal.class)
    private String password;

    private final String host;
    private final Long port;
    private final boolean usingSsl;
    private final boolean usingUri;

    // Secret: the full URI may embed the PAT, so treat it like a password.
    @JsonView(JsonViews.Internal.class)
    private String jdbcUri;

    private final String catalog;
    private final String httpPath;
    private final DatabricksAuthMechanism authMechanism;

    private static final long DEFAULT_PORT = 443L;

    // NOTE(review): with @Jacksonized, Jackson deserializes through the Lombok
    // builder and this @JsonCreator constructor is bypassed for JSON input, so
    // any defaulting done here does NOT run for deserialized instances — see
    // getAuthMechanism() below.
    @JsonCreator
    private DatabricksDatasourceConfig(String database, String username, String password, String host, Long port, boolean usingSsl,
            boolean usingUri, String jdbcUri, String catalog, String httpPath, DatabricksAuthMechanism authMechanism) {
        this.database = database;
        this.username = username;
        this.password = password;
        this.host = host;
        this.port = port;
        this.usingSsl = usingSsl;
        this.usingUri = usingUri;
        this.jdbcUri = jdbcUri;
        this.catalog = catalog;
        this.httpPath = httpPath;
        this.authMechanism = authMechanism != null ? authMechanism : DatabricksAuthMechanism.DEFAULT;
    }

    /**
     * Builds a config from the raw request map via a JSON round-trip.
     *
     * @throws org.lowcoder.sdk.exception.PluginException if the map cannot be bound
     */
    public static DatabricksDatasourceConfig buildFrom(Map<String, Object> requestMap) {
        DatabricksDatasourceConfig result = fromJson(toJson(requestMap), DatabricksDatasourceConfig.class);
        if (result == null) {
            throw ofPluginException(PluginCommonError.DATASOURCE_ARGUMENT_ERROR, "INVALID_DATABRICKS_CONFIG");
        }
        return result;
    }

    /** @return the catalog, trimmed; never null. */
    public String getCatalog() {
        return StringUtils.trimToEmpty(catalog);
    }

    /** @return the warehouse HTTP path, trimmed; never null. */
    public String getHttpPath() {
        return StringUtils.trimToEmpty(httpPath);
    }

    /** @return the configured auth mechanism; never null. */
    public DatabricksAuthMechanism getAuthMechanism() {
        // BUG FIX: deserialization goes through the @Jacksonized builder and skips
        // the constructor's null-defaulting, so JSON-built instances could carry a
        // null mechanism. Default here so every access path is covered.
        return authMechanism != null ? authMechanism : DatabricksAuthMechanism.DEFAULT;
    }

    /** @return the raw JDBC URI (may embed credentials). */
    public String getUri() {
        return jdbcUri;
    }

    /** Alias: Databricks "schema" and this config's "database" are the same thing. */
    public String getSchema() {
        return getDatabase();
    }

    public boolean isUsingUri() {
        return usingUri;
    }

    /** @return the configured port, falling back to 443 (HTTPS) when unset. */
    public Long getPort() {
        return port == null ? DEFAULT_PORT : port;
    }

    @Override
    public DatasourceConnectionConfig mergeWithUpdatedConfig(DatasourceConnectionConfig updated) {
        if (!(updated instanceof DatabricksDatasourceConfig upd)) {
            throw ofException(INVALID_DATASOURCE_CONFIG_TYPE, "INVALID_DATASOURCE_CONFIG_TYPE", updated.getClass().getSimpleName());
        }

        // Secrets are not round-tripped to the client, so a null updated password
        // means "unchanged" — keep the stored one.
        if (upd.isUsingUri()) {
            return DatabricksDatasourceConfig.builder()
                    .usingUri(true)
                    .jdbcUri(firstNonNull(upd.getUri(), getUri()))
                    .password(firstNonNull(upd.getPassword(), this.getPassword()))
                    .build();
        }

        return DatabricksDatasourceConfig.builder()
                .usingUri(false)
                .usingSsl(upd.isUsingSsl())
                .catalog(upd.getCatalog())
                .httpPath(upd.getHttpPath())
                .authMechanism(upd.getAuthMechanism())
                .username(upd.getUsername())
                .password(firstNonNull(upd.getPassword(), this.getPassword()))
                .host(upd.getHost())
                .port(upd.getPort())
                .build();
    }

    @Override
    public DatasourceConnectionConfig doEncrypt(Function<String, String> encryptFunc) {
        // Mutates in place by design (matches the other plugin configs).
        password = encryptFunc.apply(password);
        jdbcUri = encryptFunc.apply(jdbcUri);
        return this;
    }

    @Override
    public DatasourceConnectionConfig doDecrypt(Function<String, String> decryptFunc) {
        password = decryptFunc.apply(password);
        jdbcUri = decryptFunc.apply(jdbcUri);
        return this;
    }

    /**
     * @return the effective database: parsed out of the URI's ConnSchema in URI
     *         mode, otherwise the explicit field.
     */
    @JsonIgnore
    public String getParsedDatabase() {
        if (usingUri) {
            return parseDatabaseFrom(jdbcUri);
        }
        return getDatabase();
    }
}
package org.lowcoder.plugin.databricks.model;

import static org.lowcoder.sdk.exception.PluginCommonError.INVALID_QUERY_SETTINGS;
import static org.lowcoder.sdk.util.JsonUtils.fromJson;
import static org.lowcoder.sdk.util.JsonUtils.toJson;

import java.util.Map;

import org.apache.commons.collections4.MapUtils;
import org.lowcoder.sdk.exception.PluginException;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

import lombok.Builder;
import lombok.Getter;

/**
 * Per-query configuration for the Databricks plugin: either raw SQL or a
 * GUI-built statement ({@code mode == "GUI"} plus a command type and detail map).
 */
@Getter
@Builder
// BUG FIX: @Jacksonized was removed. It made Jackson deserialize through the
// Lombok builder (field names guiStatementType/guiStatementDetail), silently
// bypassing the @JsonCreator below — so the legacy JSON keys "commandType" and
// "command" were never bound, contradicting the "accept both new and legacy
// keys" intent of from(). With the creator active, both mappings work.
public class DatabricksQueryConfig {

    private final String sql;
    private final boolean disablePreparedStatement;
    private final String mode;
    // Standard names used internally for GUI command support.
    private final String guiStatementType;
    private final Map<String, Object> guiStatementDetail;

    @JsonCreator
    public DatabricksQueryConfig(
            @JsonProperty("sql") String sql,
            @JsonProperty("disablePreparedStatement") boolean disablePreparedStatement,
            @JsonProperty("mode") String mode,
            @JsonProperty("commandType") String guiStatementType,
            @JsonProperty("command") Map<String, Object> guiStatementDetail
    ) {
        this.sql = sql;
        this.disablePreparedStatement = disablePreparedStatement;
        this.mode = mode;
        this.guiStatementType = guiStatementType;
        this.guiStatementDetail = guiStatementDetail;
    }

    /**
     * Binds the raw query-config map via a JSON round-trip.
     *
     * @throws PluginException if the map is empty or cannot be bound
     */
    public static DatabricksQueryConfig from(Map<String, Object> queryConfigs) {
        if (MapUtils.isEmpty(queryConfigs)) {
            throw new PluginException(INVALID_QUERY_SETTINGS, "DATABRICKS_QUERY_CONFIG_EMPTY");
        }
        DatabricksQueryConfig result = fromJson(toJson(queryConfigs), DatabricksQueryConfig.class);
        if (result == null) {
            throw new PluginException(INVALID_QUERY_SETTINGS, "INVALID_DATABRICKS_CONFIG_0");
        }
        return result;
    }

    /** @return true when the query was built with the GUI command editor. */
    public boolean isGuiMode() {
        return "GUI".equalsIgnoreCase(mode);
    }

    /** @return the SQL text with surrounding whitespace stripped, or null. */
    public String getSql() {
        return sql == null ? null : sql.trim();
    }

    // Compatibility aliases for the QueryExecutor.
    public String getStatementType() {
        return guiStatementType;
    }

    public Map<String, Object> getStatementDetail() {
        return guiStatementDetail;
    }
}
package org.lowcoder.plugin.databricks.util;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Map;

import org.lowcoder.sdk.models.DatasourceStructure.Column;
import org.lowcoder.sdk.models.DatasourceStructure.Table;
import org.lowcoder.sdk.models.DatasourceStructure.TableType;

/**
 * Reads table/column structure from Databricks' {@code information_schema.columns}
 * and folds it into the caller-supplied table map (keyed by schema-qualified
 * table name, populated in ordinal order).
 */
public final class DatabricksStructureParser {

    private DatabricksStructureParser() {
        // static utility class — no instances
    }

    // NOTE(review): the aliases are double-quoted; in Databricks/Spark SQL the
    // conventional identifier quote is the backtick, and double-quote handling
    // depends on the ANSI mode setting — confirm this query against the target
    // warehouse before shipping.
    public static final String COLUMNS_QUERY = """
            SELECT
              table_schema as "table_schema",
              table_schema || '.' || table_name as "table_name",
              column_name as "column_name",
              data_type as "column_type",
              ordinal_position as "ordinal_position",
              column_default as "column_default",
              is_nullable as "is_nullable"
            FROM information_schema.columns
            ORDER BY table_name, ordinal_position
            """;

    /**
     * Executes {@link #COLUMNS_QUERY} and adds one Table per distinct
     * schema-qualified name, appending a Column per result row.
     *
     * @param tablesByName accumulator keyed by "schema.table"; existing entries are reused
     * @param statement    open JDBC statement; the result set is closed here, the
     *                     statement is the caller's to close
     * @throws SQLException propagated from the JDBC driver
     */
    public static void parseTableAndColumns(Map<String, Table> tablesByName, Statement statement) throws SQLException {
        try (ResultSet columnsResultSet = statement.executeQuery(COLUMNS_QUERY)) {
            while (columnsResultSet.next()) {
                // "table_name" is schema-qualified by the query above.
                String tableName = columnsResultSet.getString("table_name");
                String schema = columnsResultSet.getString("table_schema");

                Table table = tablesByName.computeIfAbsent(tableName, __ -> new Table(
                        TableType.TABLE, schema, tableName,
                        new ArrayList<>(),
                        new ArrayList<>(),
                        new ArrayList<>()
                ));

                // default value / nullability are not surfaced by the Column model here.
                table.addColumn(new Column(
                        columnsResultSet.getString("column_name"),
                        columnsResultSet.getString("column_type"),
                        null,
                        false
                ));
            }
        }
    }
}
"placeholder": "adb-xxxxxxxxxxxxxxxx.xx.azuredatabricks.net" + }, + { + "name": "port", + "label": "Port", + "type": "number", + "required": true, + "defaultValue": 443 + }, + { + "name": "httpPath", + "label": "HTTP Path", + "type": "text", + "required": true, + "placeholder": "/sql/1.0/warehouses/xxxxxxxxxxxxxxxx" + }, + { + "name": "catalog", + "label": "Catalog", + "type": "text", + "required": true, + "placeholder": "hive_metastore" + }, + { + "name": "schema", + "label": "Schema", + "type": "text", + "placeholder": "default" + }, + { + "name": "authentication", + "label": "Authentication", + "type": "select", + "required": true, + "options": [ + { "label": "Personal Access Token", "value": "pat" } + ], + "defaultValue": "pat" + }, + { + "name": "token", + "label": "Personal Access Token", + "type": "password", + "required": true, + "placeholder": "dapi_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + } + ], + "features": { + "query": true, + "insert": true, + "update": true, + "delete": true, + "listTables": true, + "getTable": true + } +} diff --git a/server/api-service/lowcoder-plugins/databricksPlugin/src/test/java/org/lowcoder/plugin/databricks/DatabricksQueryExecutorTest.java b/server/api-service/lowcoder-plugins/databricksPlugin/src/test/java/org/lowcoder/plugin/databricks/DatabricksQueryExecutorTest.java new file mode 100644 index 000000000..d2a84383d --- /dev/null +++ b/server/api-service/lowcoder-plugins/databricksPlugin/src/test/java/org/lowcoder/plugin/databricks/DatabricksQueryExecutorTest.java @@ -0,0 +1,43 @@ +package org.lowcoder.plugin.databricks; + +import org.junit.jupiter.api.Test; +import org.lowcoder.sdk.models.DatasourceStructure; +import org.lowcoder.sdk.plugin.common.sql.SqlBasedDatasourceConnectionConfig; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.Statement; +import java.util.Map; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +class DatabricksQueryExecutorTest { + + 
@Test + void testGetDatabaseMetadata() throws Exception { + DatabricksQueryExecutor executor = new DatabricksQueryExecutor(); + Connection conn = mock(Connection.class); + Statement stmt = mock(Statement.class); + ResultSet rs = mock(ResultSet.class); + + when(conn.createStatement()).thenReturn(stmt); + when(stmt.executeQuery(anyString())).thenReturn(rs); + when(rs.next()).thenReturn(true, false); + when(rs.getString("table_name")).thenReturn("my_table"); + when(rs.getString("table_schema")).thenReturn("default"); + when(rs.getString("column_name")).thenReturn("id"); + when(rs.getString("data_type")).thenReturn("int"); + + SqlBasedDatasourceConnectionConfig config = mock(SqlBasedDatasourceConnectionConfig.class); + when(config.getExtParams()).thenReturn(Map.of("catalog", "hive_metastore", "schema", "default")); + + DatasourceStructure structure = executor.getDatabaseMetadata(conn, config); + + assertNotNull(structure); + assertEquals(1, structure.getTables().size()); + assertEquals("my_table", structure.getTables().get(0).getName()); + assertEquals(1, structure.getTables().get(0).getColumns().size()); + assertEquals("id", structure.getTables().get(0).getColumns().get(0).getName()); + } +} diff --git a/server/api-service/lowcoder-plugins/pom.xml b/server/api-service/lowcoder-plugins/pom.xml index 647d1ba88..960ef07fd 100644 --- a/server/api-service/lowcoder-plugins/pom.xml +++ b/server/api-service/lowcoder-plugins/pom.xml @@ -162,6 +162,11 @@ snowflakePlugin ${revision} + + org.lowcoder + databricksPlugin + ${revision} + @@ -223,5 +228,6 @@ graphqlPlugin sqlBasedPlugin snowflakePlugin + databricksPlugin