feat: Databricks plugin (#29746)

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

- **New Features**
- Introduced a Databricks plugin for executing queries and managing
database connections.
- Added a migration to incorporate the Databricks plugin into existing
workspaces.

- **Bug Fixes**
- Ensured robust error handling in the Databricks plugin with clear
messaging for query execution failures.

- **Tests**
- Implemented tests to validate the behavior of the Databricks plugin
under various connection scenarios.

- **Documentation**
- Included configuration properties for the Databricks plugin setup.

- **Refactor**
- Added specific error types and messages for the Databricks plugin to
improve debugging and user feedback.

- **Chores**
- Modified the Java runtime environment settings to support the new
plugin's requirements.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Co-authored-by: Arpit Mohan <arpit@appsmith.com>
This commit is contained in:
Trisha Anand 2023-12-26 10:04:09 +05:30 committed by GitHub
parent 295975c47c
commit 0331d987de
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 785 additions and 4 deletions

View File

@ -5,7 +5,7 @@
<option name="INCLUDE_PROVIDED_SCOPE" value="true" />
<option name="MAIN_CLASS_NAME" value="com.appsmith.server.ServerApplication" />
<module name="server" />
<option name="VM_PARAMETERS" value="-Dpf4j.mode=development -Dpf4j.pluginsDir=appsmith-plugins --add-opens java.base/java.time=ALL-UNNAMED" />
<option name="VM_PARAMETERS" value="-Dpf4j.mode=development -Dpf4j.pluginsDir=appsmith-plugins --add-opens java.base/java.time=ALL-UNNAMED --add-opens java.base/java.nio=ALL-UNNAMED" />
<extension name="net.ashald.envfile">
<option name="IS_ENABLED" value="true" />
<option name="IS_SUBST" value="false" />
@ -13,8 +13,8 @@
<option name="IS_IGNORE_MISSING_FILES" value="false" />
<option name="IS_ENABLE_EXPERIMENTAL_INTEGRATIONS" value="false" />
<ENTRIES>
<ENTRY IS_ENABLED="true" PARSER="runconfig" />
<ENTRY IS_ENABLED="true" PARSER="env" PATH=".env" />
<ENTRY IS_ENABLED="true" PARSER="runconfig" IS_EXECUTABLE="false" />
<ENTRY IS_ENABLED="true" PARSER="env" IS_EXECUTABLE="false" PATH=".env" />
</ENTRIES>
</extension>
<method v="2">

View File

@ -15,6 +15,7 @@ public interface PluginConstants {
String OPEN_AI_PLUGIN = "openai-plugin";
String ANTHROPIC_PLUGIN = "anthropic-plugin";
String GOOGLE_AI_PLUGIN = "googleai-plugin";
String DATABRICKS_PLUGIN = "databricks-plugin";
}
public static final String DEFAULT_REST_DATASOURCE = "DEFAULT_REST_DATASOURCE";
@ -41,6 +42,7 @@ public interface PluginConstants {
public static final String OPEN_AI_PLUGIN_NAME = "Open AI";
public static final String ANTHROPIC_PLUGIN_NAME = "Anthropic";
public static final String GOOGLE_AI_PLUGIN_NAME = "Google AI";
public static final String DATABRICKS_PLUGIN_NAME = "Databricks";
}
interface HostName {

View File

@ -0,0 +1,104 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module for the Appsmith Databricks plugin (PF4J plugin packaged by the shade plugin). -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.appsmith</groupId>
        <artifactId>appsmith-plugins</artifactId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <groupId>com.external.plugins</groupId>
    <artifactId>databricksPlugin</artifactId>
    <version>1.0-SNAPSHOT</version>
    <name>databricksPlugin</name>
    <url>http://maven.apache.org</url>
    <dependencies>
        <!-- Databricks workspace/SQL SDK. -->
        <dependency>
            <groupId>com.databricks</groupId>
            <artifactId>databricks-sdk-java</artifactId>
            <version>0.14.0</version>
        </dependency>
        <!-- Databricks JDBC driver; DatabricksPlugin loads com.databricks.client.jdbc.Driver from this jar. -->
        <dependency>
            <groupId>com.databricks</groupId>
            <artifactId>databricks-jdbc</artifactId>
            <version>2.6.36</version>
        </dependency>
        <dependency>
            <groupId>com.zaxxer</groupId>
            <artifactId>HikariCP</artifactId>
            <version>5.0.1</version>
            <exclusions>
                <!-- slf4j-api is supplied by the host server; excluding it avoids classpath duplication. -->
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <!-- Jackson core is "provided" by the server runtime; only the datatype modules are bundled. -->
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>${jackson-bom.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.datatype</groupId>
            <artifactId>jackson-datatype-jdk8</artifactId>
            <version>${jackson-bom.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.datatype</groupId>
            <artifactId>jackson-datatype-jsr310</artifactId>
            <version>${jackson-bom.version}</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>32.0.1-jre</version>
        </dependency>
        <!-- Test Dependencies -->
        <dependency>
            <groupId>io.projectreactor</groupId>
            <artifactId>reactor-test</artifactId>
            <version>3.2.11.RELEASE</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.mockito</groupId>
            <artifactId>mockito-core</artifactId>
            <version>3.1.0</version>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <!-- Shades the plugin and its runtime deps into a single jar that PF4J can load. -->
            <plugin>
                <artifactId>maven-shade-plugin</artifactId>
            </plugin>
            <!-- Copies runtime dependencies next to the jar for development-mode plugin loading. -->
            <plugin>
                <artifactId>maven-dependency-plugin</artifactId>
                <executions>
                    <execution>
                        <id>copy-dependencies</id>
                        <goals>
                            <goal>copy-dependencies</goal>
                        </goals>
                        <phase>package</phase>
                        <configuration>
                            <includeScope>runtime</includeScope>
                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>

View File

@ -0,0 +1,331 @@
package com.external.plugins;
import com.appsmith.external.exceptions.pluginExceptions.AppsmithPluginError;
import com.appsmith.external.exceptions.pluginExceptions.AppsmithPluginException;
import com.appsmith.external.exceptions.pluginExceptions.StaleConnectionException;
import com.appsmith.external.models.ActionConfiguration;
import com.appsmith.external.models.ActionExecutionResult;
import com.appsmith.external.models.BearerTokenAuth;
import com.appsmith.external.models.DatasourceConfiguration;
import com.appsmith.external.models.DatasourceStructure;
import com.appsmith.external.plugins.BasePlugin;
import com.appsmith.external.plugins.PluginExecutor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.ObjectUtils;
import org.pf4j.Extension;
import org.pf4j.PluginWrapper;
import org.springframework.util.StringUtils;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeMap;
import java.util.stream.Collectors;
import static com.appsmith.external.exceptions.pluginExceptions.BasePluginErrorMessages.CONNECTION_CLOSED_ERROR_MSG;
import static com.appsmith.external.exceptions.pluginExceptions.BasePluginErrorMessages.CONNECTION_INVALID_ERROR_MSG;
import static com.appsmith.external.exceptions.pluginExceptions.BasePluginErrorMessages.CONNECTION_NULL_ERROR_MSG;
import static com.appsmith.external.helpers.PluginUtils.getColumnsListForJdbcPlugin;
import static com.external.plugins.exceptions.DatabricksErrorMessages.QUERY_EXECUTION_FAILED_ERROR_MSG;
import static com.external.plugins.exceptions.DatabricksPluginError.QUERY_EXECUTION_FAILED;
public class DatabricksPlugin extends BasePlugin {
private static final String JDBC_DRIVER = "com.databricks.client.jdbc.Driver";
public static final int VALIDITY_CHECK_TIMEOUT = 5;
private static final int INITIAL_ROWLIST_CAPACITY = 50;
private static final int CATALOG_INDEX = 2;
private static final int SCHEMA_INDEX = 3;
private static final int CONFIGURATION_TYPE_INDEX = 0;
private static final int JDBC_URL_INDEX = 5;
private static final long DEFAULT_PORT = 443L;
private static final int HTTP_PATH_INDEX = 1;
private static final String FORM_PROPERTIES_CONFIGURATION = "FORM_PROPERTIES_CONFIGURATION";
private static final String JDBC_URL_CONFIGURATION = "JDBC_URL_CONFIGURATION";
private static final String TABLES_QUERY =
"""
SELECT TABLE_SCHEMA as schema_name, table_name,
column_name, data_type, is_nullable,
column_default
FROM system.INFORMATION_SCHEMA.COLUMNS where table_schema <> 'information_schema'
""";
public DatabricksPlugin(PluginWrapper wrapper) {
super(wrapper);
}
@Slf4j
@Extension
public static class DatabricksPluginExecutor implements PluginExecutor<Connection> {
@Override
public Mono<ActionExecutionResult> execute(
Connection connection,
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String query = actionConfiguration.getBody();
List<Map<String, Object>> rowsList = new ArrayList<>(INITIAL_ROWLIST_CAPACITY);
final List<String> columnsList = new ArrayList<>();
return (Mono<ActionExecutionResult>) Mono.fromCallable(() -> {
try {
// Check for connection validity :
if (connection == null) {
return Mono.error(new StaleConnectionException(CONNECTION_NULL_ERROR_MSG));
} else if (connection.isClosed()) {
return Mono.error(new StaleConnectionException(CONNECTION_CLOSED_ERROR_MSG));
} else if (!connection.isValid(VALIDITY_CHECK_TIMEOUT)) {
/**
* Not adding explicit `!sqlConnectionFromPool.isValid(VALIDITY_CHECK_TIMEOUT)`
* check here because this check may take few seconds to complete hence adding
* extra time delay.
*/
return Mono.error(new StaleConnectionException(CONNECTION_INVALID_ERROR_MSG));
}
} catch (SQLException error) {
error.printStackTrace();
// This should not happen ideally.
System.out.println(
"Error checking validity of Databricks connection : " + error.getMessage());
}
try {
// We can proceed since the connection is valid.
Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(query);
ResultSetMetaData metaData = resultSet.getMetaData();
int colCount = metaData.getColumnCount();
columnsList.addAll(getColumnsListForJdbcPlugin(metaData));
while (resultSet.next()) {
// Use `LinkedHashMap` here so that the column ordering is preserved in the response.
Map<String, Object> row = new LinkedHashMap<>(colCount);
for (int i = 1; i <= colCount; i++) {
Object value;
Object resultSetObject = resultSet.getObject(i);
if (resultSetObject == null) {
value = null;
} else {
value = resultSetObject;
}
row.put(metaData.getColumnName(i), value);
}
rowsList.add(row);
}
} catch (SQLException e) {
return Mono.error(new AppsmithPluginException(
QUERY_EXECUTION_FAILED,
QUERY_EXECUTION_FAILED_ERROR_MSG,
e.getMessage(),
"SQLSTATE: " + e.getSQLState()));
}
ActionExecutionResult result = new ActionExecutionResult();
result.setBody(objectMapper.valueToTree(rowsList));
result.setIsExecutionSuccess(true);
return Mono.just(result);
})
.flatMap(obj -> obj)
.subscribeOn(Schedulers.boundedElastic());
}
@Override
public Mono<Connection> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
// Ensure the databricks JDBC driver is loaded.
try {
Class.forName(JDBC_DRIVER);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
BearerTokenAuth bearerTokenAuth = (BearerTokenAuth) datasourceConfiguration.getAuthentication();
Properties p = new Properties();
p.put("UID", "token");
p.put("PWD", bearerTokenAuth.getBearerToken() == null ? "" : bearerTokenAuth.getBearerToken());
String url;
if (JDBC_URL_CONFIGURATION.equals(datasourceConfiguration
.getProperties()
.get(CONFIGURATION_TYPE_INDEX)
.getValue())) {
url = (String) datasourceConfiguration
.getProperties()
.get(JDBC_URL_INDEX)
.getValue();
} else if (FORM_PROPERTIES_CONFIGURATION.equals(datasourceConfiguration
.getProperties()
.get(CONFIGURATION_TYPE_INDEX)
.getValue())) {
// Set up the connection URL
StringBuilder urlBuilder = new StringBuilder("jdbc:databricks://");
List<String> hosts = datasourceConfiguration.getEndpoints().stream()
.map(endpoint ->
endpoint.getHost() + ":" + ObjectUtils.defaultIfNull(endpoint.getPort(), DEFAULT_PORT))
.collect(Collectors.toList());
urlBuilder.append(String.join(",", hosts)).append(";");
url = urlBuilder.toString();
p.put(
"httpPath",
datasourceConfiguration
.getProperties()
.get(HTTP_PATH_INDEX)
.getValue());
p.put("AuthMech", "3");
// Always enable SSL for Databricks connections.
p.put("SSL", "1");
} else {
url = "";
}
return (Mono<Connection>) Mono.fromCallable(() -> {
Connection connection = DriverManager.getConnection(url, p);
// Execute statements to default catalog and schema for all queries on this datasource.
if (FORM_PROPERTIES_CONFIGURATION.equals(datasourceConfiguration
.getProperties()
.get(CONFIGURATION_TYPE_INDEX)
.getValue())) {
try (Statement statement = connection.createStatement(); ) {
String catalog = (String) datasourceConfiguration
.getProperties()
.get(CATALOG_INDEX)
.getValue();
if (!StringUtils.hasText(catalog)) {
catalog = "samples";
}
String useCatalogQuery = "USE CATALOG " + catalog;
statement.execute(useCatalogQuery);
} catch (SQLException e) {
return Mono.error(new AppsmithPluginException(
AppsmithPluginError.PLUGIN_EXECUTE_ARGUMENT_ERROR,
"The Appsmith server has failed to change the catalog.",
e.getMessage()));
}
try (Statement statement = connection.createStatement(); ) {
String schema = (String) datasourceConfiguration
.getProperties()
.get(SCHEMA_INDEX)
.getValue();
if (!StringUtils.hasText(schema)) {
schema = "default";
}
String useSchemaQuery = "USE SCHEMA " + schema;
statement.execute(useSchemaQuery);
} catch (SQLException e) {
return Mono.error(new AppsmithPluginException(
AppsmithPluginError.PLUGIN_EXECUTE_ARGUMENT_ERROR,
"The Appsmith server has failed to change the schema",
e.getMessage()));
}
}
return Mono.just(connection);
})
.flatMap(obj -> obj)
.subscribeOn(Schedulers.boundedElastic());
}
@Override
public void datasourceDestroy(Connection connection) {
try {
if (connection != null) {
connection.close();
}
} catch (SQLException e) {
// This should not happen ideally.
System.out.println("Error closing Databricks connection : " + e.getMessage());
}
}
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
return new HashSet<>();
}
@Override
public Mono<DatasourceStructure> getStructure(
Connection connection, DatasourceConfiguration datasourceConfiguration) {
return Mono.fromSupplier(() -> {
final DatasourceStructure structure = new DatasourceStructure();
final Map<String, DatasourceStructure.Table> tablesByName =
new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
try (Statement statement = connection.createStatement();
ResultSet columnsResultSet = statement.executeQuery(TABLES_QUERY)) {
while (columnsResultSet.next()) {
final String schemaName = columnsResultSet.getString("schema_name");
final String tableName = columnsResultSet.getString("table_name");
final String fullTableName = schemaName + "." + tableName;
if (!tablesByName.containsKey(fullTableName)) {
tablesByName.put(
fullTableName,
new DatasourceStructure.Table(
DatasourceStructure.TableType.TABLE,
schemaName,
fullTableName,
new ArrayList<>(),
new ArrayList<>(),
new ArrayList<>()));
}
final DatasourceStructure.Table table = tablesByName.get(fullTableName);
final String defaultExpr = columnsResultSet.getString("column_default");
table.getColumns()
.add(new DatasourceStructure.Column(
columnsResultSet.getString("column_name"),
columnsResultSet.getString("data_type"),
defaultExpr,
null));
}
structure.setTables(new ArrayList<>(tablesByName.values()));
for (DatasourceStructure.Table table : structure.getTables()) {
table.getKeys().sort(Comparator.naturalOrder());
}
log.debug("Got the structure of Databricks DB");
return structure;
} catch (SQLException e) {
return Mono.error(new AppsmithPluginException(
AppsmithPluginError.PLUGIN_GET_STRUCTURE_ERROR,
"The Appsmith server has failed to fetch the structure of your schema.",
e.getMessage(),
"SQLSTATE: " + e.getSQLState()));
}
})
.map(resultStructure -> (DatasourceStructure) resultStructure)
.subscribeOn(Schedulers.boundedElastic());
}
}
}

View File

@ -0,0 +1,10 @@
package com.external.plugins.exceptions;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/**
 * User-facing error messages emitted by the Databricks plugin.
 *
 * <p>Static constant holder — not meant to be instantiated.
 */
public final class DatabricksErrorMessages {

    private DatabricksErrorMessages() {
        // To prevent instantiation (same effect as the lombok private no-args constructor).
    }

    /** Generic failure message shown when query execution fails. */
    public static final String QUERY_EXECUTION_FAILED_ERROR_MSG = "Your query failed to execute. ";
}

View File

@ -0,0 +1,67 @@
package com.external.plugins.exceptions;
import com.appsmith.external.exceptions.AppsmithErrorAction;
import com.appsmith.external.exceptions.pluginExceptions.BasePluginError;
import com.appsmith.external.models.ErrorType;
import lombok.Getter;
import java.text.MessageFormat;
@Getter
/**
 * Error catalog for the Databricks plugin, implementing the shared {@code BasePluginError}
 * contract so errors carry an HTTP code, an app error code and downstream details.
 *
 * <p>The {@code "{0}"}/{@code "{1}"}/{@code "{2}"} strings are MessageFormat placeholders:
 * callers supply the actual message / downstream message / downstream code as varargs to
 * the getters below.
 */
public enum DatabricksPluginError implements BasePluginError {
    /** Raised when a Databricks SQL query fails to execute. */
    QUERY_EXECUTION_FAILED(
            500,
            "PE-DBK-5000",
            "{0}",
            AppsmithErrorAction.DEFAULT,
            "Query execution error",
            ErrorType.INTERNAL_ERROR,
            "{1}",
            "{2}"),
    ;

    // HTTP status surfaced to the client.
    private final Integer httpErrorCode;
    // Appsmith-internal error code ("PE-DBK-..." namespace for this plugin).
    private final String appErrorCode;
    // MessageFormat template for the user-facing message.
    private final String message;
    private final String title;
    private final AppsmithErrorAction errorAction;
    private final ErrorType errorType;
    // Templates for details propagated from the underlying driver/database error.
    private final String downstreamErrorMessage;
    private final String downstreamErrorCode;

    DatabricksPluginError(
            Integer httpErrorCode,
            String appErrorCode,
            String message,
            AppsmithErrorAction errorAction,
            String title,
            ErrorType errorType,
            String downstreamErrorMessage,
            String downstreamErrorCode) {
        this.httpErrorCode = httpErrorCode;
        this.appErrorCode = appErrorCode;
        this.errorType = errorType;
        this.errorAction = errorAction;
        this.message = message;
        this.title = title;
        this.downstreamErrorMessage = downstreamErrorMessage;
        this.downstreamErrorCode = downstreamErrorCode;
    }

    /** Formats the message template with the supplied arguments. */
    public String getMessage(Object... args) {
        return new MessageFormat(this.message).format(args);
    }

    /** Returns the error type name (string form required by the BasePluginError contract). */
    public String getErrorType() {
        return this.errorType.toString();
    }

    // replacePlaceholderWithValue — presumably inherited from BasePluginError; TODO confirm.
    public String getDownstreamErrorMessage(Object... args) {
        return replacePlaceholderWithValue(this.downstreamErrorMessage, args);
    }

    public String getDownstreamErrorCode(Object... args) {
        return replacePlaceholderWithValue(this.downstreamErrorCode, args);
    }
}

View File

@ -0,0 +1,16 @@
{
"editor": [
{
"controlType": "SECTION",
"identifier": "SELECTOR",
"children": [
{
"label": "",
"configProperty": "actionConfiguration.body",
"controlType": "QUERY_DYNAMIC_TEXT",
"evaluationSubstitutionType": "TEMPLATE"
}
]
}
]
}

View File

@ -0,0 +1,119 @@
{
"form": [
{
"sectionName": "Details",
"id": 1,
"children": [
{
"label": "Configuration method",
"configProperty": "datasourceConfiguration.properties[0].value",
"controlType": "DROP_DOWN",
"isRequired": true,
"initialValue": "FORM_PROPERTIES_CONFIGURATION",
"options": [
{
"label": "Use JDBC URL",
"value": "JDBC_URL_CONFIGURATION"
},
{
"label": "Use form properties",
"value": "FORM_PROPERTIES_CONFIGURATION"
}
]
},
{
"label": "Host",
"configProperty": "datasourceConfiguration.endpoints[0].host",
"controlType": "INPUT_TEXT",
"isRequired": true,
"placeholderText": "",
"initialValue": "",
"hidden": {
"path": "datasourceConfiguration.properties[0].value",
"comparison": "NOT_EQUALS",
"value": "FORM_PROPERTIES_CONFIGURATION"
}
},
{
"label": "Port",
"configProperty": "datasourceConfiguration.endpoints[0].port",
"dataType": "NUMBER",
"controlType": "INPUT_TEXT",
"placeholderText": "443",
"initialValue" : "443",
"hidden": {
"path": "datasourceConfiguration.properties[0].value",
"comparison": "NOT_EQUALS",
"value": "FORM_PROPERTIES_CONFIGURATION"
}
},
{
"label": "HTTP Path",
"configProperty": "datasourceConfiguration.properties[1].value",
"controlType": "INPUT_TEXT",
"isRequired": true,
"placeholderText": "/sql/1.0/warehouses/<id>",
"hidden": {
"path": "datasourceConfiguration.properties[0].value",
"comparison": "NOT_EQUALS",
"value": "FORM_PROPERTIES_CONFIGURATION"
}
},
{
"label": "Default catalog",
"configProperty": "datasourceConfiguration.properties[2].value",
"controlType": "INPUT_TEXT",
"isRequired": false,
"initialValue": "samples",
"placeholderText": "samples",
"hidden": {
"path": "datasourceConfiguration.properties[0].value",
"comparison": "NOT_EQUALS",
"value": "FORM_PROPERTIES_CONFIGURATION"
}
},
{
"label": "Default schema",
"configProperty": "datasourceConfiguration.properties[3].value",
"controlType": "INPUT_TEXT",
"isRequired": false,
"initialValue": "default",
"placeholderText": "default",
"hidden": {
"path": "datasourceConfiguration.properties[0].value",
"comparison": "NOT_EQUALS",
"value": "FORM_PROPERTIES_CONFIGURATION"
}
},
{
"label": "JDBC URL",
"configProperty": "datasourceConfiguration.properties[5].value",
"controlType": "INPUT_TEXT",
"isRequired": false,
"placeholderText": "jdbc:databricks://<host>:<port>/<schema>;transportMode=http;ssl=1;AuthMech=3;httpPath=<path>;ConnCatalog=<catalog>",
"hidden": {
"path": "datasourceConfiguration.properties[0].value",
"comparison": "NOT_EQUALS",
"value": "JDBC_URL_CONFIGURATION"
}
},
{
"label": "Authentication type",
"configProperty": "datasourceConfiguration.authentication.authenticationType",
"controlType": "INPUT_TEXT",
"initialValue": "bearerToken",
"hidden": true
},
{
"label": "Personal access token",
"configProperty": "datasourceConfiguration.authentication.bearerToken",
"controlType": "INPUT_TEXT",
"dataType": "PASSWORD",
"initialValue": "",
"isRequired": true,
"encrypted": true
}
]
}
]
}

View File

@ -0,0 +1,5 @@
# PF4J plugin descriptor for the Databricks plugin.
# plugin.class must name the Plugin subclass that PF4J instantiates at load time.
plugin.id=databricks-plugin
plugin.class=com.external.plugins.DatabricksPlugin
plugin.version=1.0-SNAPSHOT
plugin.provider=tech@appsmith.com
# No dependencies on other PF4J plugins.
plugin.dependencies=

View File

@ -0,0 +1,68 @@
package com.external.plugins;
import com.appsmith.external.exceptions.pluginExceptions.StaleConnectionException;
import com.appsmith.external.models.ActionConfiguration;
import com.appsmith.external.models.ActionExecutionResult;
import com.appsmith.external.models.DatasourceConfiguration;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.sql.Connection;
import java.sql.SQLException;
import static com.appsmith.external.exceptions.pluginExceptions.BasePluginErrorMessages.CONNECTION_CLOSED_ERROR_MSG;
import static com.appsmith.external.exceptions.pluginExceptions.BasePluginErrorMessages.CONNECTION_INVALID_ERROR_MSG;
import static com.appsmith.external.exceptions.pluginExceptions.BasePluginErrorMessages.CONNECTION_NULL_ERROR_MSG;
import static com.external.plugins.DatabricksPlugin.VALIDITY_CHECK_TIMEOUT;
public class DatabricksPluginTest {
public DatabricksPlugin.DatabricksPluginExecutor databricksPluginExecutor;
@BeforeEach
public void setUp() {
databricksPluginExecutor = new DatabricksPlugin.DatabricksPluginExecutor();
}
@Test
public void testExecuteNullConnection() {
Mono<ActionExecutionResult> executionResultMono =
databricksPluginExecutor.execute(null, new DatasourceConfiguration(), new ActionConfiguration());
StepVerifier.create(executionResultMono)
.expectErrorMatches(throwable -> throwable instanceof StaleConnectionException
&& throwable.getMessage().equals(CONNECTION_NULL_ERROR_MSG))
.verify();
}
@Test
public void testExecuteClosedConnection() throws SQLException {
Connection mockConnection = Mockito.mock(Connection.class);
Mockito.when(mockConnection.isClosed()).thenReturn(true);
Mono<ActionExecutionResult> executionResultMono = databricksPluginExecutor.execute(
mockConnection, new DatasourceConfiguration(), new ActionConfiguration());
StepVerifier.create(executionResultMono)
.expectErrorMatches(throwable -> throwable instanceof StaleConnectionException
&& throwable.getMessage().equals(CONNECTION_CLOSED_ERROR_MSG))
.verify();
}
@Test
public void testExecuteInvalidConnection() throws SQLException {
Connection mockConnection = Mockito.mock(Connection.class);
Mockito.when(mockConnection.isValid(VALIDITY_CHECK_TIMEOUT)).thenReturn(false);
Mono<ActionExecutionResult> executionResultMono = databricksPluginExecutor.execute(
mockConnection, new DatasourceConfiguration(), new ActionConfiguration());
StepVerifier.create(executionResultMono)
.expectErrorMatches(throwable -> throwable instanceof StaleConnectionException
&& throwable.getMessage().equals(CONNECTION_INVALID_ERROR_MSG))
.verify();
}
}

View File

@ -16,7 +16,8 @@
"label": "Bearer token",
"value": "bearerToken"
}
]
],
"hidden": true
},
{
"label": "API Key",

View File

@ -61,9 +61,13 @@
<module>smtpPlugin</module>
<module>openAiPlugin</module>
<module>anthropicPlugin</module>
<module>googleAiPlugin</module>
<module>databricksPlugin</module>
</modules>
<properties>

View File

@ -0,0 +1,53 @@
package com.appsmith.server.migrations.db.ce;
import com.appsmith.external.constants.PluginConstants;
import com.appsmith.external.models.PluginType;
import com.appsmith.server.domains.Plugin;
import io.mongock.api.annotations.ChangeUnit;
import io.mongock.api.annotations.Execution;
import io.mongock.api.annotations.RollbackExecution;
import lombok.extern.slf4j.Slf4j;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.data.mongodb.core.MongoTemplate;
import static com.appsmith.server.migrations.DatabaseChangelog1.installPluginToAllWorkspaces;
@Slf4j
@ChangeUnit(order = "039", id = "add-databricks-plugin", author = " ")
public class Migration039AddDatabricksPlugin {

    private final MongoTemplate mongoTemplate;

    public Migration039AddDatabricksPlugin(MongoTemplate mongoTemplate) {
        this.mongoTemplate = mongoTemplate;
    }

    /**
     * Mongock requires a rollback method on every change unit. This migration is additive
     * and safe to re-run, so there is nothing to undo.
     */
    @RollbackExecution
    public void rollbackExecution() {}

    /**
     * Registers the Databricks plugin document in the plugin collection and installs it
     * into every existing workspace.
     */
    @Execution
    public void addPluginToDbAndWorkspace() {
        Plugin plugin = new Plugin();
        plugin.setName(PluginConstants.PluginName.DATABRICKS_PLUGIN_NAME);
        plugin.setType(PluginType.DB);
        plugin.setPluginName(PluginConstants.PluginName.DATABRICKS_PLUGIN_NAME);
        plugin.setPackageName(PluginConstants.PackageName.DATABRICKS_PLUGIN);
        plugin.setUiComponent("UQIDbEditorForm");
        plugin.setDatasourceComponent("DbEditorForm");
        plugin.setResponseType(Plugin.ResponseType.JSON);
        plugin.setIconLocation("https://assets.appsmith.com/databricks-logo.svg");
        plugin.setDocumentationLink("https://docs.appsmith.com/connect-data/reference/databricks");
        plugin.setDefaultInstall(true);

        try {
            mongoTemplate.insert(plugin);
        } catch (DuplicateKeyException e) {
            log.warn(plugin.getPackageName() + " already present in database.");
        }

        if (plugin.getId() == null) {
            // The insert did not assign an id (e.g. the duplicate-key path above). Bail out
            // instead of calling installPluginToAllWorkspaces with a null plugin id.
            // TODO(review): on the duplicate path, consider looking up the existing plugin's
            // id and installing that into workspaces instead.
            log.error("Failed to insert the Databricks plugin into the database.");
            return;
        }

        installPluginToAllWorkspaces(mongoTemplate, plugin.getId());
    }
}

View File

@ -69,6 +69,7 @@ sh /opt/appsmith/run-starting-page-init.sh &
# Ref -Dlog4j2.formatMsgNoLookups=true https://spring.io/blog/2021/12/10/log4j2-vulnerability-and-spring-boot
exec java ${APPSMITH_JAVA_ARGS:-} ${APPSMITH_JAVA_HEAP_ARG:-} \
--add-opens java.base/java.time=ALL-UNNAMED \
--add-opens java.base/java.nio=ALL-UNNAMED \
-Dserver.port=8080 \
-XX:+ShowCodeDetailsInExceptionMessages \
-Djava.security.egd=file:/dev/./urandom \