fix: logger-not-working-in-plugins (#36231)

Fixes #36073 

Hi @NilanshBansal 

**Issue :**

**Missing Logging Implementation :**

- Without a logging implementation (such as SLF4J Simple or Logback) on
the project's classpath, the SLF4J logging calls in the plugins are
bound to a no-op logger.
- As a result, no log output is printed to the terminal.

**Solution :**

The solution is to add a logging implementation to the plugins' parent
pom. Adding the slf4j-simple dependency to the pom.xml provides a simple
logging implementation that writes log statements to the console.

```
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.36</version>
</dependency>

<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-simple</artifactId>
    <version>1.7.36</version> 
</dependency>
```
**Explanation:**

- slf4j-api provides the SLF4J API, which is the interface for logging.
- slf4j-simple provides a simple implementation of the SLF4J API, which
is responsible for actually printing the log messages to the console.

**Screenshots :** 

Amazon S3 Plugin and Postgres Plugin 

![image](https://github.com/user-attachments/assets/d3e90b96-2b02-493d-8ffa-44e2aa348fc1)


<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

- **New Features**
- Enhanced logging capabilities with the integration of SLF4J API and
SLF4J Simple implementations.

- **Improvements**
- Improved log management and output formatting for better monitoring
and debugging across various plugins, transitioning from standard output
to structured logging.
- Refined logging practices in multiple plugins to support better
maintainability and performance.
- Removed method for console logging from the Stopwatch class to
streamline logging practices.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->
This commit is contained in:
Anna Hariprasad 2024-09-17 15:17:58 +05:30 committed by GitHub
parent 66c815f1cb
commit 7aae152dee
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
27 changed files with 308 additions and 507 deletions

View File

@ -25,18 +25,6 @@ public class Stopwatch {
log.debug("Execute time: {}, Time elapsed: {}ms", this.flow, this.watch.getTime(TimeUnit.MILLISECONDS));
}
/**
* This is a temporary function created to log stopwatch timer within the plugin package
* Due to this bug https://github.com/appsmithorg/appsmith/issues/36073 logging is not working in the plugin package
*/
public void stopAndLogTimeInMillisWithSysOut() {
if (!this.watch.isStopped()) {
this.watch.stop();
}
System.out.println(String.format(
"Execute time: %s, Time elapsed: %dms", this.flow, this.watch.getTime(TimeUnit.MILLISECONDS)));
}
public void stopTimer() {
if (!this.watch.isStopped()) {
this.watch.stop();

View File

@ -416,9 +416,7 @@ public class AmazonS3Plugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for AmazonS3 plugin.");
final Map<String, Object> formData = actionConfiguration.getFormData();
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -467,8 +465,7 @@ public class AmazonS3Plugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": executeCommon() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for AmazonS3 plugin.");
final String[] query = new String[1];
Map<String, Object> requestProperties = new HashMap<>();
List<RequestParamDTO> requestParams = new ArrayList<>();
@ -544,7 +541,7 @@ public class AmazonS3Plugin extends BasePlugin {
Object actionResult;
switch (s3Action) {
case LIST:
System.out.println(
log.debug(
Thread.currentThread().getName() + ": LIST action called for AmazonS3 plugin.");
String prefix = getDataValueSafelyFromFormData(formData, LIST_PREFIX, STRING_TYPE, "");
requestParams.add(new RequestParamDTO(LIST_PREFIX, prefix, null, null, null));
@ -644,7 +641,7 @@ public class AmazonS3Plugin extends BasePlugin {
break;
case UPLOAD_FILE_FROM_BODY: {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": UPLOAD_FILE_FROM_BODY action called for AmazonS3 plugin.");
requestParams.add(
new RequestParamDTO(ACTION_CONFIGURATION_PATH, path, null, null, null));
@ -697,7 +694,7 @@ public class AmazonS3Plugin extends BasePlugin {
break;
}
case UPLOAD_MULTIPLE_FILES_FROM_BODY: {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": UPLOAD_MULTIPLE_FILES_FROM_BODY action called for AmazonS3 plugin.");
requestParams.add(
new RequestParamDTO(ACTION_CONFIGURATION_PATH, path, null, null, null));
@ -751,7 +748,7 @@ public class AmazonS3Plugin extends BasePlugin {
break;
}
case READ_FILE:
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": READ_FILE action called for AmazonS3 plugin.");
requestParams.add(
new RequestParamDTO(ACTION_CONFIGURATION_PATH, path, null, null, null));
@ -770,7 +767,7 @@ public class AmazonS3Plugin extends BasePlugin {
actionResult = Map.of("fileData", result);
break;
case DELETE_FILE:
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": DELETE_FILE action called for AmazonS3 plugin.");
requestParams.add(
new RequestParamDTO(ACTION_CONFIGURATION_PATH, path, null, null, null));
@ -783,7 +780,7 @@ public class AmazonS3Plugin extends BasePlugin {
actionResult = Map.of("status", "File deleted successfully");
break;
case DELETE_MULTIPLE_FILES:
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": DELETE_MULTIPLE_FILES action called for AmazonS3 plugin.");
requestParams.add(
new RequestParamDTO(ACTION_CONFIGURATION_PATH, path, null, null, null));
@ -823,7 +820,7 @@ public class AmazonS3Plugin extends BasePlugin {
ActionExecutionResult actionExecutionResult = new ActionExecutionResult();
actionExecutionResult.setBody(result);
actionExecutionResult.setIsExecutionSuccess(true);
System.out.println("In the S3 Plugin, got action execution result");
log.debug("In the S3 Plugin, got action execution result");
return Mono.just(actionExecutionResult);
})
.onErrorResume(e -> {
@ -842,7 +839,7 @@ public class AmazonS3Plugin extends BasePlugin {
})
// Now set the request in the result to be returned to the server
.map(actionExecutionResult -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": Setting the actionExecutionResult for AmazonS3 plugin.");
ActionExecutionRequest actionExecutionRequest = new ActionExecutionRequest();
actionExecutionRequest.setQuery(query[0]);
@ -892,8 +889,7 @@ public class AmazonS3Plugin extends BasePlugin {
@Override
public Mono<AmazonS3> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for AmazonS3 plugin.");
try {
Class.forName(S3_DRIVER);
} catch (ClassNotFoundException e) {
@ -921,17 +917,14 @@ public class AmazonS3Plugin extends BasePlugin {
@Override
public void datasourceDestroy(AmazonS3 connection) {
String printMessage =
Thread.currentThread().getName() + ": datasourceDestroy() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceDestroy() called for AmazonS3 plugin.");
if (connection != null) {
Mono.fromCallable(() -> {
connection.shutdown();
return connection;
})
.onErrorResume(exception -> {
System.out.println("Error closing S3 connection.");
exception.printStackTrace();
log.error("Error closing S3 connection.", exception.getMessage());
return Mono.empty();
})
.subscribeOn(scheduler)
@ -941,9 +934,7 @@ public class AmazonS3Plugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for AmazonS3 plugin.");
Set<String> invalids = new HashSet<>();
if (datasourceConfiguration == null || datasourceConfiguration.getAuthentication() == null) {
@ -1005,8 +996,7 @@ public class AmazonS3Plugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for AmazonS3 plugin.");
if (datasourceConfiguration == null) {
return Mono.just(new DatasourceTestResult(
S3ErrorMessages.DS_AT_LEAST_ONE_MANDATORY_PARAMETER_MISSING_ERROR_MSG));
@ -1029,7 +1019,7 @@ public class AmazonS3Plugin extends BasePlugin {
* object with wrong credentials does not throw any exception.
* - Hence, adding a listBuckets() method call to test the connection.
*/
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": listBuckets() called for AmazonS3 plugin.");
connection.listBuckets();
return new DatasourceTestResult();
@ -1068,7 +1058,7 @@ public class AmazonS3Plugin extends BasePlugin {
return datasourceCreate(datasourceConfiguration)
.flatMap(connection -> Mono.fromCallable(() -> {
connection.listObjects(defaultBucket);
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": connection.listObjects() called for AmazonS3 plugin.");
return new DatasourceTestResult();
})
@ -1095,12 +1085,11 @@ public class AmazonS3Plugin extends BasePlugin {
public Mono<DatasourceStructure> getStructure(
AmazonS3 connection, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for AmazonS3 plugin.");
return Mono.fromSupplier(() -> {
List<DatasourceStructure.Table> tableList;
try {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": connection.listBuckets() called for AmazonS3 plugin.");
tableList = connection.listBuckets().stream()
/* Get name of each bucket */
@ -1150,9 +1139,7 @@ public class AmazonS3Plugin extends BasePlugin {
Object... args) {
String jsonBody = (String) input;
Param param = (Param) args[0];
String printMessage =
Thread.currentThread().getName() + ": substituteValueInInput() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": substituteValueInInput() called for AmazonS3 plugin.");
return DataTypeStringUtils.jsonSmartReplacementPlaceholderWithValue(
jsonBody, value, null, insertedParams, null, param);
}
@ -1198,9 +1185,8 @@ public class AmazonS3Plugin extends BasePlugin {
@Override
public Mono<Void> sanitizeGenerateCRUDPageTemplateInfo(
List<ActionConfiguration> actionConfigurationList, Object... args) {
String printMessage = Thread.currentThread().getName()
+ ": sanitizeGenerateCRUDPageTemplateInfo() called for AmazonS3 plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": sanitizeGenerateCRUDPageTemplateInfo() called for AmazonS3 plugin.");
if (isEmpty(actionConfigurationList)) {
return Mono.empty();
}

View File

@ -12,6 +12,7 @@ import com.appsmith.external.models.DBAuth;
import com.appsmith.external.models.DatasourceConfiguration;
import com.appsmith.external.models.Property;
import com.external.plugins.exceptions.S3ErrorMessages;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import java.util.List;
@ -26,6 +27,7 @@ import static com.external.plugins.constants.S3PluginConstants.CUSTOM_ENDPOINT_R
import static com.external.plugins.constants.S3PluginConstants.S3_SERVICE_PROVIDER_PROPERTY_INDEX;
import static com.external.utils.DatasourceUtils.S3ServiceProvider.AMAZON;
@Slf4j
public class DatasourceUtils {
/**
@ -101,7 +103,7 @@ public class DatasourceUtils {
*/
public static AmazonS3ClientBuilder getS3ClientBuilder(DatasourceConfiguration datasourceConfiguration)
throws AppsmithPluginException {
System.out.println(Thread.currentThread().getName() + ": getS3ClientBuilder action called.");
log.debug(Thread.currentThread().getName() + ": getS3ClientBuilder action called.");
DBAuth authentication = (DBAuth) datasourceConfiguration.getAuthentication();
String accessKey = authentication.getUsername();
String secretKey = authentication.getPassword();

View File

@ -74,8 +74,7 @@ public class AnthropicPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for Anthropic plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for Anthropic plugin.");
final ApiKeyAuth apiKeyAuth = (ApiKeyAuth) datasourceConfiguration.getAuthentication();
if (!StringUtils.hasText(apiKeyAuth.getValue())) {
return Mono.error(new AppsmithPluginException(
@ -113,9 +112,7 @@ public class AnthropicPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for Anthropic plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for Anthropic plugin.");
// Get prompt from action configuration
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -254,8 +251,7 @@ public class AnthropicPlugin extends BasePlugin {
@Override
public Mono<TriggerResultDTO> trigger(
APIConnection connection, DatasourceConfiguration datasourceConfiguration, TriggerRequestDTO request) {
String printMessage = Thread.currentThread().getName() + ": trigger() called for Anthropic plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": trigger() called for Anthropic plugin.");
final ApiKeyAuth apiKeyAuth = (ApiKeyAuth) datasourceConfiguration.getAuthentication();
if (!StringUtils.hasText(apiKeyAuth.getValue())) {
return Mono.error(new AppsmithPluginException(
@ -317,9 +313,7 @@ public class AnthropicPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for Anthropic plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for Anthropic plugin.");
return RequestUtils.validateApiKeyAuthDatasource(datasourceConfiguration);
}

View File

@ -65,9 +65,7 @@ public class AppsmithAiPlugin extends BasePlugin {
@Override
public Mono<APIConnection> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": datasourceCreate() called for AppsmithAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for AppsmithAI plugin.");
ApiKeyAuth apiKeyAuth = new ApiKeyAuth();
apiKeyAuth.setValue("test-key");
return ApiKeyAuthentication.create(apiKeyAuth)
@ -81,8 +79,7 @@ public class AppsmithAiPlugin extends BasePlugin {
@Override
public Mono<TriggerResultDTO> trigger(
APIConnection connection, DatasourceConfiguration datasourceConfiguration, TriggerRequestDTO request) {
String printMessage = Thread.currentThread().getName() + ": trigger() called for AppsmithAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": trigger() called for AppsmithAI plugin.");
SourceDetails sourceDetails = SourceDetails.createSourceDetails(request);
String requestType = request.getRequestType();
if (UPLOAD_FILES.equals(requestType)) {
@ -148,9 +145,7 @@ public class AppsmithAiPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for AppsmithAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for AppsmithAI plugin.");
// Get input from action configuration
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -170,8 +165,7 @@ public class AppsmithAiPlugin extends BasePlugin {
List<Map.Entry<String, String>> insertedParams,
ExecuteActionDTO executeActionDTO) {
String printMessage = Thread.currentThread().getName() + ": executeCommon() called for AppsmithAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for AppsmithAI plugin.");
// Initializing object for error condition
ActionExecutionResult errorResult = new ActionExecutionResult();
initUtils.initializeResponseWithError(errorResult);
@ -209,16 +203,13 @@ public class AppsmithAiPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration, boolean isEmbedded) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for AppsmithAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for AppsmithAI plugin.");
return Set.of();
}
@Override
public Mono<DatasourceStorage> preSaveHook(DatasourceStorage datasourceStorage) {
String printMessage = Thread.currentThread().getName() + ": preSaveHook() called for AppsmithAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": preSaveHook() called for AppsmithAI plugin.");
return aiServerService
.associateDatasource(createAssociateDTO(datasourceStorage))
.thenReturn(datasourceStorage);
@ -226,8 +217,7 @@ public class AppsmithAiPlugin extends BasePlugin {
@Override
public Mono<DatasourceStorage> preDeleteHook(DatasourceStorage datasourceStorage) {
String printMessage = Thread.currentThread().getName() + ": preDeleteHook() called for AppsmithAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": preDeleteHook() called for AppsmithAI plugin.");
DatasourceConfiguration datasourceConfiguration = datasourceStorage.getDatasourceConfiguration();
if (hasFiles(datasourceConfiguration)) {
return aiServerService

View File

@ -85,8 +85,7 @@ public class ArangoDBPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for ArangoDB plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for ArangoDB plugin.");
if (!isConnectionValid(db)) {
return Mono.error(new StaleConnectionException(CONNECTION_INVALID_ERROR_MSG));
}
@ -101,7 +100,7 @@ public class ArangoDBPlugin extends BasePlugin {
}
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": got action execution result from ArangoDB plugin.");
ArangoCursor<Map> cursor = db.query(query, null, null, Map.class);
ActionExecutionResult result = new ActionExecutionResult();
@ -179,11 +178,9 @@ public class ArangoDBPlugin extends BasePlugin {
@Override
public Mono<ArangoDatabase> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for ArangoDB plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for ArangoDB plugin.");
return (Mono<ArangoDatabase>) Mono.fromCallable(() -> {
System.out.println(
Thread.currentThread().getName() + ": inside schdeuled thread from ArangoDB plugin.");
log.debug(Thread.currentThread().getName() + ": inside schdeuled thread from ArangoDB plugin.");
List<Endpoint> nonEmptyEndpoints = datasourceConfiguration.getEndpoints().stream()
.filter(endpoint -> isNonEmptyEndpoint(endpoint))
.collect(Collectors.toList());
@ -263,17 +260,13 @@ public class ArangoDBPlugin extends BasePlugin {
@Override
public void datasourceDestroy(ArangoDatabase db) {
String printMessage =
Thread.currentThread().getName() + ": datasourceDestroy() called for ArangoDB plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceDestroy() called for ArangoDB plugin.");
db.arango().shutdown();
}
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for ArangoDB plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for ArangoDB plugin.");
Set<String> invalids = new HashSet<>();
DBAuth auth = (DBAuth) datasourceConfiguration.getAuthentication();
@ -318,14 +311,13 @@ public class ArangoDBPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(ArangoDatabase connection) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for ArangoDB plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for ArangoDB plugin.");
return Mono.fromCallable(() -> {
connection.getVersion();
return new DatasourceTestResult();
})
.onErrorResume(error -> {
System.out.println("Error when testing ArangoDB datasource.");
log.error("Error when testing ArangoDB datasource.");
error.printStackTrace();
return Mono.just(new DatasourceTestResult(arangoDBErrorUtils.getReadableError(error)));
})
@ -337,8 +329,7 @@ public class ArangoDBPlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
ArangoDatabase db, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for ArangoDB plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for ArangoDB plugin.");
final DatasourceStructure structure = new DatasourceStructure();
List<DatasourceStructure.Table> tables = new ArrayList<>();
structure.setTables(tables);
@ -358,7 +349,7 @@ public class ArangoDBPlugin extends BasePlugin {
return Flux.fromIterable(collections)
.filter(collectionEntity -> !collectionEntity.getIsSystem())
.flatMap(collectionEntity -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": got collectionEntity result from ArangoDB plugin.");
final ArrayList<DatasourceStructure.Column> columns = new ArrayList<>();
final ArrayList<DatasourceStructure.Template> templates = new ArrayList<>();
@ -385,7 +376,7 @@ public class ArangoDBPlugin extends BasePlugin {
Mono.just(document));
})
.flatMap(tuple -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": generating templates and structure in ArangoDB plugin.");
final ArrayList<DatasourceStructure.Column> columns = tuple.getT1();
final ArrayList<DatasourceStructure.Template> templates = tuple.getT2();
@ -403,9 +394,8 @@ public class ArangoDBPlugin extends BasePlugin {
@Override
public Mono<String> getEndpointIdentifierForRateLimit(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for ArangoDB plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for ArangoDB plugin.");
List<Endpoint> endpoints = datasourceConfiguration.getEndpoints();
String identifier = "";
// When hostname and port both are available, both will be used as identifier

View File

@ -60,13 +60,12 @@ public class AwsLambdaPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for AWS Lambda plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for AWS Lambda plugin.");
Map<String, Object> formData = actionConfiguration.getFormData();
String command = getDataValueSafelyFromFormData(formData, "command", STRING_TYPE);
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": creating action execution result for AWS Lambda plugin.");
ActionExecutionResult result;
switch (Objects.requireNonNull(command)) {
@ -94,8 +93,7 @@ public class AwsLambdaPlugin extends BasePlugin {
@Override
public Mono<TriggerResultDTO> trigger(
AWSLambda connection, DatasourceConfiguration datasourceConfiguration, TriggerRequestDTO request) {
String printMessage = Thread.currentThread().getName() + ": trigger() called for AWS Lambda plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": trigger() called for AWS Lambda plugin.");
if (!StringUtils.hasText(request.getRequestType())) {
throw new AppsmithPluginException(
AppsmithPluginError.PLUGIN_EXECUTE_ARGUMENT_ERROR, "request type is missing");
@ -149,9 +147,7 @@ public class AwsLambdaPlugin extends BasePlugin {
@Override
public Mono<AWSLambda> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": datasourceCreate() called for AWS Lambda plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for AWS Lambda plugin.");
DBAuth authentication = (DBAuth) datasourceConfiguration.getAuthentication();
String accessKey = authentication.getUsername();
String secretKey = authentication.getPassword();
@ -187,8 +183,7 @@ public class AwsLambdaPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(AWSLambda connection) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for AWS Lambda plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for AWS Lambda plugin.");
return Mono.fromCallable(() -> {
/*
* - Please note that as of 28 Jan 2021, the way Amazon client SDK works, creating a connection
@ -218,9 +213,7 @@ public class AwsLambdaPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for AWS Lambda plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for AWS Lambda plugin.");
Set<String> invalids = new HashSet<>();
if (datasourceConfiguration == null
|| datasourceConfiguration.getAuthentication() == null

View File

@ -59,11 +59,11 @@ public class DatabricksPlugin extends BasePlugin {
private static final String TABLES_QUERY =
"""
SELECT TABLE_SCHEMA as schema_name, table_name,
column_name, data_type, is_nullable,
column_default
FROM system.INFORMATION_SCHEMA.COLUMNS where table_schema <> 'information_schema'
""";
SELECT TABLE_SCHEMA as schema_name, table_name,
column_name, data_type, is_nullable,
column_default
FROM system.INFORMATION_SCHEMA.COLUMNS where table_schema <> 'information_schema'
""";
public DatabricksPlugin(PluginWrapper wrapper) {
super(wrapper);
@ -79,15 +79,14 @@ public class DatabricksPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for Databricks plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for Databricks plugin.");
String query = actionConfiguration.getBody();
List<Map<String, Object>> rowsList = new ArrayList<>(INITIAL_ROWLIST_CAPACITY);
final List<String> columnsList = new ArrayList<>();
return (Mono<ActionExecutionResult>) Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": creating action execution result from Databricks plugin.");
try {
@ -108,8 +107,7 @@ public class DatabricksPlugin extends BasePlugin {
} catch (SQLException error) {
error.printStackTrace();
// This should not happen ideally.
System.out.println(
"Error checking validity of Databricks connection : " + error.getMessage());
log.error("Error checking validity of Databricks connection : " + error.getMessage());
}
ActionExecutionResult result = new ActionExecutionResult();
@ -183,9 +181,7 @@ public class DatabricksPlugin extends BasePlugin {
@Override
public Mono<Connection> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": datasourceCreate() called for Databricks plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for Databricks plugin.");
// Ensure the databricks JDBC driver is loaded.
try {
Class.forName(JDBC_DRIVER);
@ -249,8 +245,7 @@ public class DatabricksPlugin extends BasePlugin {
}
return (Mono<Connection>) Mono.fromCallable(() -> {
System.out.println(
Thread.currentThread().getName() + ": creating connection from Databricks plugin.");
log.debug(Thread.currentThread().getName() + ": creating connection from Databricks plugin.");
Connection connection = DriverManager.getConnection(url, p);
// Execute statements to default catalog and schema for all queries on this datasource.
@ -301,16 +296,14 @@ public class DatabricksPlugin extends BasePlugin {
@Override
public void datasourceDestroy(Connection connection) {
String printMessage =
Thread.currentThread().getName() + ": datasourceDestroy() called for Databricks plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceDestroy() called for Databricks plugin.");
try {
if (connection != null) {
connection.close();
}
} catch (SQLException e) {
// This should not happen ideally.
System.out.println("Error closing Databricks connection : " + e.getMessage());
log.error("Error closing Databricks connection : " + e.getMessage());
}
}
@ -322,10 +315,9 @@ public class DatabricksPlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
Connection connection, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for Databricks plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for Databricks plugin.");
return Mono.fromSupplier(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": fetching datasource structure from Databricks plugin.");
final DatasourceStructure structure = new DatasourceStructure();
final Map<String, DatasourceStructure.Table> tablesByName =

View File

@ -91,8 +91,7 @@ public class DynamoPlugin extends BasePlugin {
private final Scheduler scheduler = Schedulers.boundedElastic();
public Object extractValue(Object rawItem) {
String printMessage = Thread.currentThread().getName() + ": extractValue() called for Dynamo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": extractValue() called for Dynamo plugin.");
if (!(rawItem instanceof List) && !(rawItem instanceof Map)) {
return rawItem;
@ -164,9 +163,7 @@ public class DynamoPlugin extends BasePlugin {
*/
public Object getTransformedResponse(Map<String, Object> rawResponse, String action)
throws AppsmithPluginException {
String printMessage =
Thread.currentThread().getName() + ": getTransformedResponse() called for Dynamo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getTransformedResponse() called for Dynamo plugin.");
Map<String, Object> transformedResponse = new HashMap<>();
for (Map.Entry<String, Object> responseEntry : rawResponse.entrySet()) {
Object rawItems = responseEntry.getValue();
@ -188,14 +185,13 @@ public class DynamoPlugin extends BasePlugin {
DynamoDbClient ddb,
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for Dynamo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for Dynamo plugin.");
final Map<String, Object> requestData = new HashMap<>();
final String body = actionConfiguration.getBody();
List<RequestParamDTO> requestParams = new ArrayList<>();
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": creating action execution result from DynamoDB plugin.");
ActionExecutionResult result = new ActionExecutionResult();
@ -286,11 +282,9 @@ public class DynamoPlugin extends BasePlugin {
@Override
public Mono<DynamoDbClient> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for Dynamo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for Dynamo plugin.");
return Mono.fromCallable(() -> {
System.out.println(
Thread.currentThread().getName() + ": creating dynamodbclient from DynamoDB plugin.");
log.debug(Thread.currentThread().getName() + ": creating dynamodbclient from DynamoDB plugin.");
final DynamoDbClientBuilder builder = DynamoDbClient.builder();
if (!CollectionUtils.isEmpty(datasourceConfiguration.getEndpoints())) {
@ -326,8 +320,7 @@ public class DynamoPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(@NonNull DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": validateDatasource() called for Dynamo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for Dynamo plugin.");
Set<String> invalids = new HashSet<>();
final DBAuth authentication = (DBAuth) datasourceConfiguration.getAuthentication();
@ -352,8 +345,7 @@ public class DynamoPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(DynamoDbClient connection) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for Dynamo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for Dynamo plugin.");
return Mono.fromCallable(() -> {
/*
* - Creating a connection with false credentials does not throw an error. Hence,
@ -367,10 +359,9 @@ public class DynamoPlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
DynamoDbClient ddb, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for Dynamo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for Dynamo plugin.");
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": creating datasourceStructure from DynamoDB plugin.");
final ListTablesResponse listTablesResponse = ddb.listTables();

View File

@ -80,15 +80,14 @@ public class ElasticSearchPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for ElasticSearch plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for ElasticSearch plugin.");
final Map<String, Object> requestData = new HashMap<>();
String query = actionConfiguration.getBody();
List<RequestParamDTO> requestParams = new ArrayList<>();
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": creating action execution result from ElasticSearch plugin.");
final ActionExecutionResult result = new ActionExecutionResult();
@ -152,7 +151,7 @@ public class ElasticSearchPlugin extends BasePlugin {
}
result.setIsExecutionSuccess(true);
System.out.println("In the Elastic Search Plugin, got action execution result");
log.debug("In the Elastic Search Plugin, got action execution result");
return Mono.just(result);
})
.flatMap(obj -> obj)
@ -171,7 +170,7 @@ public class ElasticSearchPlugin extends BasePlugin {
})
// Now set the request in the result to be returned to the server
.map(result -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": setting the request in the result to be returned from ElasticSearch plugin.");
ActionExecutionRequest request = new ActionExecutionRequest();
request.setProperties(requestData);
@ -198,9 +197,7 @@ public class ElasticSearchPlugin extends BasePlugin {
@Override
public Mono<RestClient> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": datasourceCreate() called for ElasticSearch plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for ElasticSearch plugin.");
final List<HttpHost> hosts = new ArrayList<>();
for (Endpoint endpoint : datasourceConfiguration.getEndpoints()) {
@ -255,9 +252,7 @@ public class ElasticSearchPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for ElasticSearch plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for ElasticSearch plugin.");
Set<String> invalids = new HashSet<>();
if (CollectionUtils.isEmpty(datasourceConfiguration.getEndpoints())) {
@ -282,9 +277,7 @@ public class ElasticSearchPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(RestClient connection) {
String printMessage =
Thread.currentThread().getName() + ": testDatasource() called for ElasticSearch plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for ElasticSearch plugin.");
return Mono.fromCallable(() -> {
if (connection == null) {
return new DatasourceTestResult("Null client object to ElasticSearch.");
@ -334,9 +327,8 @@ public class ElasticSearchPlugin extends BasePlugin {
@Override
public Mono<String> getEndpointIdentifierForRateLimit(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for ElasticSearch plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for ElasticSearch plugin.");
List<Endpoint> endpoints = datasourceConfiguration.getEndpoints();
String identifier = "";
// When hostname and port both are available, both will be used as identifier

View File

@ -138,9 +138,7 @@ public class FirestorePlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for Firestore plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for Firestore plugin.");
Object smartSubstitutionObject = actionConfiguration.getFormData().getOrDefault(SMART_SUBSTITUTION, TRUE);
Boolean smartJsonSubstitution = TRUE;
if (smartSubstitutionObject instanceof Boolean) {
@ -247,7 +245,7 @@ public class FirestorePlugin extends BasePlugin {
}
try {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper.readValue invoked from Firestore plugin.");
return Mono.just(objectMapper.readValue(strBody, HashMap.class));
} catch (IOException e) {
@ -320,7 +318,7 @@ public class FirestorePlugin extends BasePlugin {
})
// Now set the request in the result to be returned to the server
.map(result -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": setting the request in action execution result from Firestore plugin.");
ActionExecutionRequest request = new ActionExecutionRequest();
request.setProperties(requestData);
@ -508,9 +506,7 @@ public class FirestorePlugin extends BasePlugin {
Map<String, Object> mapBody,
String query,
List<RequestParamDTO> requestParams) {
String printMessage =
Thread.currentThread().getName() + ": handleDocumentLevelMethod() called for Firestore plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": handleDocumentLevelMethod() called for Firestore plugin.");
return Mono.just(method)
// Get the actual Java method to be called.
.flatMap(method1 -> {
@ -603,9 +599,8 @@ public class FirestorePlugin extends BasePlugin {
List<RequestParamDTO> requestParams,
Set<String> hintMessages,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": handleCollectionLevelMethod() called for Firestore plugin.";
System.out.println(printMessage);
log.debug(
Thread.currentThread().getName() + ": handleCollectionLevelMethod() called for Firestore plugin.");
final CollectionReference collection = connection.collection(path);
if (method == Method.GET_COLLECTION) {
@ -901,9 +896,7 @@ public class FirestorePlugin extends BasePlugin {
@Override
public Mono<Firestore> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": datasourceCreate() called for Firestore plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for Firestore plugin.");
final DBAuth authentication = (DBAuth) datasourceConfiguration.getAuthentication();
final Set<String> errors = validateDatasource(datasourceConfiguration);
@ -919,7 +912,7 @@ public class FirestorePlugin extends BasePlugin {
InputStream serviceAccount = new ByteArrayInputStream(clientJson.getBytes());
return Mono.fromSupplier(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": instantiating googlecredentials object from Firestore plugin.");
GoogleCredentials credentials;
try {
@ -951,13 +944,12 @@ public class FirestorePlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for Firestore plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for Firestore plugin.");
return datasourceCreate(datasourceConfiguration).flatMap(connection -> {
try {
connection.listCollections();
} catch (FirestoreException e) {
System.out.println("Invalid datasource configuration: " + e.getMessage());
log.error("Invalid datasource configuration: " + e.getMessage());
if (e.getMessage().contains("Metadata operations require admin authentication")) {
DatasourceTestResult datasourceTestResult = new DatasourceTestResult();
datasourceTestResult.setMessages(new HashSet<>(
@ -979,9 +971,7 @@ public class FirestorePlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for Firestore plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for Firestore plugin.");
final DBAuth authentication = (DBAuth) datasourceConfiguration.getAuthentication();
Set<String> invalids = new HashSet<>();
@ -1009,10 +999,9 @@ public class FirestorePlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
Firestore connection, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for Firestore plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for Firestore plugin.");
return Mono.fromSupplier(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": invoking connection.listCollections() from Firestore plugin.");
Iterable<CollectionReference> collectionReferences = connection.listCollections();

View File

@ -66,8 +66,7 @@ public class GoogleAiPlugin extends BasePlugin {
*/
@Override
public Mono<DatasourceTestResult> testDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for GoogleAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for GoogleAI plugin.");
final ApiKeyAuth apiKeyAuth = (ApiKeyAuth) datasourceConfiguration.getAuthentication();
if (!StringUtils.hasText(apiKeyAuth.getValue())) {
return Mono.error(new AppsmithPluginException(
@ -98,9 +97,7 @@ public class GoogleAiPlugin extends BasePlugin {
ExecuteActionDTO executeActionDTO,
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for GoogleAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for GoogleAI plugin.");
// Get prompt from action configuration
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -199,16 +196,13 @@ public class GoogleAiPlugin extends BasePlugin {
@Override
public Mono<TriggerResultDTO> trigger(
APIConnection connection, DatasourceConfiguration datasourceConfiguration, TriggerRequestDTO request) {
String printMessage = Thread.currentThread().getName() + ": trigger() called for GoogleAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": trigger() called for GoogleAI plugin.");
return Mono.just(new TriggerResultDTO(getDataToMap(GoogleAIConstants.GOOGLE_AI_MODELS)));
}
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for GoogleAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for GoogleAI plugin.");
return RequestUtils.validateApiKeyAuthDatasource(datasourceConfiguration);
}

View File

@ -82,9 +82,7 @@ public class GoogleSheetsPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for GoogleSheets plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for GoogleSheets plugin.");
boolean smartJsonSubstitution;
final Map<String, Object> formData = actionConfiguration.getFormData();
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -143,9 +141,7 @@ public class GoogleSheetsPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeCommon() called for GoogleSheets plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for GoogleSheets plugin.");
// Initializing object for error condition
ActionExecutionResult errorResult = new ActionExecutionResult();
errorResult.setStatusCode(GSheetsPluginError.QUERY_EXECUTION_FAILED.getAppErrorCode());
@ -255,7 +251,7 @@ public class GoogleSheetsPlugin extends BasePlugin {
})
.onErrorResume(e -> {
errorResult.setBody(Exceptions.unwrap(e).getMessage());
System.out.println("Received error on Google Sheets action execution");
log.error("Received error on Google Sheets action execution");
e.printStackTrace();
if (!(e instanceof AppsmithPluginException)) {
e = new AppsmithPluginException(
@ -324,8 +320,7 @@ public class GoogleSheetsPlugin extends BasePlugin {
@Override
public Mono<TriggerResultDTO> trigger(
Void connection, DatasourceConfiguration datasourceConfiguration, TriggerRequestDTO request) {
String printMessage = Thread.currentThread().getName() + ": trigger() called for GoogleSheets plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": trigger() called for GoogleSheets plugin.");
final TriggerMethod triggerMethod = GoogleSheetsMethodStrategy.getTriggerMethod(request, objectMapper);
MethodConfig methodConfig = new MethodConfig(request);
@ -396,9 +391,8 @@ public class GoogleSheetsPlugin extends BasePlugin {
Map<String, Object> formData,
Map<String, String> mappedColumns,
Map<String, String> pluginSpecificTemplateParams) {
String printMessage =
Thread.currentThread().getName() + ": updateCrudTemplateFormData() called for GoogleSheets plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": updateCrudTemplateFormData() called for GoogleSheets plugin.");
pluginSpecificTemplateParams.forEach((k, v) -> {
if (formData.containsKey(k)) {
setDataValueSafelyInFormData(formData, k, v);
@ -412,9 +406,7 @@ public class GoogleSheetsPlugin extends BasePlugin {
@Override
public Mono<DatasourceConfiguration> getDatasourceMetadata(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": getDatasourceMetadata() called for GoogleSheets plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getDatasourceMetadata() called for GoogleSheets plugin.");
return GetDatasourceMetadataMethod.getDatasourceMetadata(datasourceConfiguration);
}
}

View File

@ -85,9 +85,7 @@ public class GraphQLPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for GraphQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for GraphQL plugin.");
final List<Property> properties = actionConfiguration.getPluginSpecifiedTemplates();
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -164,8 +162,7 @@ public class GraphQLPlugin extends BasePlugin {
ActionConfiguration actionConfiguration,
List<Map.Entry<String, String>> insertedParams) {
String printMessage = Thread.currentThread().getName() + ": executeCommon() called for GraphQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for GraphQL plugin.");
// Initializing object for error condition
ActionExecutionResult errorResult = new ActionExecutionResult();
initUtils.initializeResponseWithError(errorResult);

View File

@ -244,9 +244,7 @@ public class MongoPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for Mongo plugin.");
final Map<String, Object> formData = actionConfiguration.getFormData();
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -308,10 +306,9 @@ public class MongoPlugin extends BasePlugin {
ActionConfiguration actionConfiguration,
List<Map.Entry<String, String>> parameters) {
String printMessage = Thread.currentThread().getName() + ": executeCommon() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for Mongo plugin.");
if (mongoClient == null) {
System.out.println("Encountered null connection in MongoDB plugin. Reporting back.");
log.debug("Encountered null connection in MongoDB plugin. Reporting back.");
throw new StaleConnectionException(MONGO_CLIENT_NULL_ERROR_MSG);
}
Mono<Document> mongoOutputMono;
@ -319,7 +316,7 @@ public class MongoPlugin extends BasePlugin {
String query;
List<RequestParamDTO> requestParams;
try {
System.out.println(Thread.currentThread().getName() + ": mongoClient.getDatabase from Mongo plugin.");
log.debug(Thread.currentThread().getName() + ": mongoClient.getDatabase from Mongo plugin.");
MongoDatabase database = mongoClient.getDatabase(getDatabaseName(datasourceConfiguration));
final Map<String, Object> formData = actionConfiguration.getFormData();
@ -386,14 +383,14 @@ public class MongoPlugin extends BasePlugin {
`new` field in the command. Let's return that value to the user.
*/
if (outputJson.has(VALUE)) {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper.readTree.VALUE from Mongo plugin.");
Stopwatch processStopwatch =
new Stopwatch("Mongo Plugin objectMapper readTree.VALUE");
result.setBody(objectMapper.readTree(
cleanUp(new JSONObject().put(VALUE, outputJson.get(VALUE)))
.toString()));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
}
/*
@ -401,14 +398,14 @@ public class MongoPlugin extends BasePlugin {
results. In case there are no results for find, this key is not present in the result json.
*/
if (outputJson.has("cursor")) {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper.readTree.CURSOR from Mongo plugin.");
Stopwatch processStopwatch =
new Stopwatch("Mongo Plugin objectMapper readTree.CURSOR");
JSONArray outputResult = (JSONArray) cleanUp(
outputJson.getJSONObject("cursor").getJSONArray("firstBatch"));
result.setBody(objectMapper.readTree(outputResult.toString()));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
}
/*
@ -417,12 +414,12 @@ public class MongoPlugin extends BasePlugin {
number of documents inserted.
*/
if (outputJson.has("n")) {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper.readTree.N from Mongo plugin.");
Stopwatch processStopwatch = new Stopwatch("Mongo Plugin objectMapper readTree.N");
JSONObject body = new JSONObject().put("n", outputJson.getBigInteger("n"));
result.setBody(objectMapper.readTree(body.toString()));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
headerArray.put(body);
}
@ -431,14 +428,14 @@ public class MongoPlugin extends BasePlugin {
documents updated.
*/
if (outputJson.has(N_MODIFIED)) {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper.readTree.N_MODIFIED from Mongo plugin.");
Stopwatch processStopwatch =
new Stopwatch("Mongo Plugin objectMapper readTree.N_MODIFIED");
JSONObject body =
new JSONObject().put(N_MODIFIED, outputJson.getBigInteger(N_MODIFIED));
result.setBody(objectMapper.readTree(body.toString()));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
headerArray.put(body);
}
@ -448,12 +445,12 @@ public class MongoPlugin extends BasePlugin {
if (outputJson.has(VALUES)) {
JSONArray outputResult = (JSONArray) cleanUp(outputJson.getJSONArray(VALUES));
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper.createObjectNode from Mongo plugin.");
Stopwatch processStopwatch =
new Stopwatch("Mongo Plugin objectMapper createObjectNode");
ObjectNode resultNode = objectMapper.createObjectNode();
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
// Create a JSON structure with the results stored with a key to abide by the
// Server-Client contract of only sending array of objects in result.
@ -461,12 +458,12 @@ public class MongoPlugin extends BasePlugin {
new Stopwatch("Mongo Plugin objectMapper readTree outputResult");
resultNode.putArray(VALUES).addAll((ArrayNode)
objectMapper.readTree(outputResult.toString()));
processStopwatch1.stopAndLogTimeInMillisWithSysOut();
processStopwatch1.stopAndLogTimeInMillis();
Stopwatch processStopwatch2 =
new Stopwatch("Mongo Plugin objectMapper readTree resultNode");
result.setBody(objectMapper.readTree(resultNode.toString()));
processStopwatch2.stopAndLogTimeInMillisWithSysOut();
processStopwatch2.stopAndLogTimeInMillis();
}
/*
@ -477,11 +474,10 @@ public class MongoPlugin extends BasePlugin {
JSONObject statusJson = new JSONObject().put("ok", status);
headerArray.put(statusJson);
System.out.println(
Thread.currentThread().getName() + ": objectMapper readTree for Mongo plugin.");
log.debug(Thread.currentThread().getName() + ": objectMapper readTree for Mongo plugin.");
Stopwatch processStopwatch = new Stopwatch("Mongo Plugin objectMapper readTree");
result.setHeaders(objectMapper.readTree(headerArray.toString()));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
} catch (JsonProcessingException e) {
return Mono.error(new AppsmithPluginException(
MongoPluginError.QUERY_EXECUTION_FAILED,
@ -493,8 +489,7 @@ public class MongoPlugin extends BasePlugin {
})
.onErrorResume(error -> {
if (error instanceof StaleConnectionException) {
System.out.println(
"The mongo connection seems to have been invalidated or doesn't exist anymore");
log.error("The mongo connection seems to have been invalidated or doesn't exist anymore");
return Mono.error(error);
} else if (!(error instanceof AppsmithPluginException)) {
error = new AppsmithPluginException(
@ -509,7 +504,7 @@ public class MongoPlugin extends BasePlugin {
})
// Now set the request in the result to be returned to the server
.map(actionExecutionResult -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": building actionExecutionResult from Mongo plugin.");
ActionExecutionRequest request = new ActionExecutionRequest();
request.setQuery(query);
@ -538,8 +533,7 @@ public class MongoPlugin extends BasePlugin {
*/
@Override
public String sanitizeReplacement(String replacementValue, DataType dataType) {
String printMessage = Thread.currentThread().getName() + ": sanitizeReplacement() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": sanitizeReplacement() called for Mongo plugin.");
replacementValue = removeOrAddQuotesAroundMongoDBSpecialTypes(replacementValue);
if (DataType.BSON_SPECIAL_DATA_TYPES.equals(dataType)) {
@ -568,9 +562,7 @@ public class MongoPlugin extends BasePlugin {
@Override
public ActionConfiguration getSchemaPreviewActionConfig(Template queryTemplate, Boolean isMock) {
String printMessage =
Thread.currentThread().getName() + ": getSchemaPreviewActionConfig() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getSchemaPreviewActionConfig() called for Mongo plugin.");
// For mongo, currently this experiment will only exist for mock DB movies
// Later on we can extend it for all mongo datasources
if (isMock) {
@ -631,12 +623,12 @@ public class MongoPlugin extends BasePlugin {
try {
argWithoutQuotes = matcher.group(4);
if (specialType.isQuotesRequiredAroundParameter()) {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper writeValueAsString for Mongo plugin.");
Stopwatch processStopwatch =
new Stopwatch("Mongo Plugin objectMapper writeValueAsString");
argWithoutQuotes = objectMapper.writeValueAsString(argWithoutQuotes);
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
}
} catch (JsonProcessingException e) {
throw new AppsmithPluginException(
@ -758,12 +750,10 @@ public class MongoPlugin extends BasePlugin {
a user that doesn't have write permissions on the database.
Ref: https://api.mongodb.com/java/2.13/com/mongodb/DB.html#setReadOnly-java.lang.Boolean-
*/
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for Mongo plugin.");
return Mono.just(datasourceConfiguration)
.flatMap(dsConfig -> {
System.out.println(
Thread.currentThread().getName() + ": buildClientURI called from Mongo plugin.");
log.debug(Thread.currentThread().getName() + ": buildClientURI called from Mongo plugin.");
try {
return Mono.just(buildClientURI(dsConfig));
} catch (AppsmithPluginException e) {
@ -803,8 +793,7 @@ public class MongoPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": validateDatasource() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for Mongo plugin.");
Set<String> invalids = new HashSet<>();
List<Property> properties = datasourceConfiguration.getProperties();
DBAuth authentication = (DBAuth) datasourceConfiguration.getAuthentication();
@ -893,8 +882,7 @@ public class MongoPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for Mongo plugin.");
Function<TimeoutException, Throwable> timeoutExceptionThrowableFunction =
error -> new AppsmithPluginException(
AppsmithPluginError.PLUGIN_DATASOURCE_TIMEOUT_ERROR,
@ -902,8 +890,7 @@ public class MongoPlugin extends BasePlugin {
return datasourceCreate(datasourceConfiguration)
.flatMap(mongoClient -> {
System.out.println(
Thread.currentThread().getName() + ":Finding list of databases for Mongo plugin.");
log.debug(Thread.currentThread().getName() + ":Finding list of databases for Mongo plugin.");
final Publisher<String> result = mongoClient.listDatabaseNames();
final Mono<List<String>> documentMono =
Flux.from(result).collectList().cache();
@ -963,8 +950,7 @@ public class MongoPlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
MongoClient mongoClient, DatasourceConfiguration datasourceConfiguration, Boolean isMock) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for Mongo plugin.");
final DatasourceStructure structure = new DatasourceStructure();
List<DatasourceStructure.Table> tables = new ArrayList<>();
structure.setTables(tables);
@ -1096,9 +1082,8 @@ public class MongoPlugin extends BasePlugin {
*/
@Override
public void extractAndSetNativeQueryFromFormData(ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName()
+ ": extractAndSetNativeQueryFromFormData() called for Mongo plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": extractAndSetNativeQueryFromFormData() called for Mongo plugin.");
Map<String, Object> formData = actionConfiguration.getFormData();
if (formData != null && !formData.isEmpty()) {
/* If it is not raw command, then it must be one of the mongo form commands */

View File

@ -135,9 +135,7 @@ public class MssqlPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for MSSQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for MSSQL plugin.");
String query = actionConfiguration.getBody();
// Check for query parameter before performing the probably expensive fetch connection from the pool op.
if (!StringUtils.hasLength(query)) {
@ -186,8 +184,7 @@ public class MssqlPlugin extends BasePlugin {
List<MustacheBindingToken> mustacheValuesInOrder,
ExecuteActionDTO executeActionDTO) {
String printMessage = Thread.currentThread().getName() + ": executeCommon() called for MSSQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for MSSQL plugin.");
final Map<String, Object> requestData = new HashMap<>();
requestData.put("preparedStatement", TRUE.equals(preparedStatement) ? true : false);
@ -198,8 +195,7 @@ public class MssqlPlugin extends BasePlugin {
List.of(new RequestParamDTO(ACTION_CONFIGURATION_BODY, transformedQuery, null, null, psParams));
return Mono.fromCallable(() -> {
System.out.println(
Thread.currentThread().getName() + ": within mono callable from MSSQL plugin.");
log.debug(Thread.currentThread().getName() + ": within mono callable from MSSQL plugin.");
boolean isResultSet;
Connection sqlConnectionFromPool;
Statement statement = null;
@ -227,7 +223,7 @@ public class MssqlPlugin extends BasePlugin {
if (sqlConnectionFromPool == null
|| sqlConnectionFromPool.isClosed()
|| !sqlConnectionFromPool.isValid(VALIDITY_CHECK_TIMEOUT)) {
System.out.println("Encountered stale connection in MsSQL plugin. Reporting back.");
log.debug("Encountered stale connection in MsSQL plugin. Reporting back.");
if (sqlConnectionFromPool == null) {
return Mono.error(new StaleConnectionException(CONNECTION_NULL_ERROR_MSG));
@ -246,7 +242,7 @@ public class MssqlPlugin extends BasePlugin {
// This exception is thrown only when the timeout to `isValid` is negative. Since, that's
// not the case,
// here, this should never happen.
System.out.println("Error checking validity of MsSQL connection.");
log.error("Error checking validity of MsSQL connection.");
error.printStackTrace();
}
@ -306,11 +302,11 @@ public class MssqlPlugin extends BasePlugin {
}
ActionExecutionResult result = new ActionExecutionResult();
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper.valueToTree invoked from MSSQL plugin.");
Stopwatch processStopwatch = new Stopwatch("MSSQL Plugin objectMapper valueToTree");
result.setBody(objectMapper.valueToTree(rowsList));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
result.setMessages(populateHintMessages(columnsList));
result.setIsExecutionSuccess(true);
return Mono.just(result);
@ -328,7 +324,7 @@ public class MssqlPlugin extends BasePlugin {
})
// Now set the request in the result to be returned back to the server
.map(actionExecutionResult -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": setting request in the actionExecutionResult from MSSQL plugin.");
ActionExecutionRequest request = new ActionExecutionRequest();
request.setQuery(query);
@ -359,10 +355,9 @@ public class MssqlPlugin extends BasePlugin {
@Override
public Mono<HikariDataSource> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for MSSQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for MSSQL plugin.");
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName() + ": Connecting to SQL Server db");
log.debug(Thread.currentThread().getName() + ": Connecting to SQL Server db");
return createConnectionPool(datasourceConfiguration);
})
.subscribeOn(scheduler);
@ -377,8 +372,7 @@ public class MssqlPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(@NonNull DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": validateDatasource() called for MSSQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for MSSQL plugin.");
Set<String> invalids = new HashSet<>();
if (isEmpty(datasourceConfiguration.getEndpoints())) {
@ -422,8 +416,7 @@ public class MssqlPlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
        HikariDataSource connection, DatasourceConfiguration datasourceConfiguration) {
    // Emit a single structured log line. The leftover System.out.println and its dead
    // `printMessage` local (remnants of the pre-SLF4J logging) are removed; parameterized
    // logging avoids building the message string when DEBUG is disabled.
    log.debug("{}: getStructure() called for MSSQL plugin.", Thread.currentThread().getName());
    // Structure discovery (tables, columns, keys) is fully delegated to the datasource utils.
    return MssqlDatasourceUtils.getStructure(connection, datasourceConfiguration);
}

View File

@ -168,7 +168,7 @@ public class MySqlPlugin extends BasePlugin {
* supported by PreparedStatement. In case of PreparedStatement turned off, the action and datasource configurations are
* prepared (binding replacement) using PluginExecutor.variableSubstitution
*
* @param connectionContext : This is the connection that is established to the data source. This connection is according
* @param connectionContext : This is the connection that is established to the data source. This connection is according
* to the parameters in Datasource Configuration
* @param executeActionDTO : This is the data structure sent by the client during execute. This contains the params
* which would be used for substitution
@ -183,9 +183,7 @@ public class MySqlPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for MySQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for MySQL plugin.");
final Map<String, Object> requestData = new HashMap<>();
Boolean isPreparedStatement;
@ -244,9 +242,7 @@ public class MySqlPlugin extends BasePlugin {
@Override
public ActionConfiguration getSchemaPreviewActionConfig(Template queryTemplate, Boolean isMock) {
String printMessage =
Thread.currentThread().getName() + ": getSchemaPreviewActionConfig() called for MySQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getSchemaPreviewActionConfig() called for MySQL plugin.");
ActionConfiguration actionConfig = new ActionConfiguration();
// Sets query body
actionConfig.setBody(queryTemplate.getBody());
@ -262,9 +258,8 @@ public class MySqlPlugin extends BasePlugin {
@Override
public Mono<String> getEndpointIdentifierForRateLimit(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": getEndpointIdentifierForRateLimit() called for MySQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for MySQL plugin.");
List<Endpoint> endpoints = datasourceConfiguration.getEndpoints();
SSHConnection sshProxy = datasourceConfiguration.getSshProxy();
String identifier = "";
@ -295,8 +290,7 @@ public class MySqlPlugin extends BasePlugin {
List<MustacheBindingToken> mustacheValuesInOrder,
ExecuteActionDTO executeActionDTO,
Map<String, Object> requestData) {
String printMessage = Thread.currentThread().getName() + ": executeCommon() called for MySQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for MySQL plugin.");
ConnectionPool connectionPool = connectionContext.getConnection();
SSHTunnelContext sshTunnelContext = connectionContext.getSshTunnelContext();
String query = actionConfiguration.getBody();
@ -386,7 +380,7 @@ public class MySqlPlugin extends BasePlugin {
Optional<PoolMetrics> poolMetricsOptional = connectionPool.getMetrics();
if (poolMetricsOptional.isPresent()) {
PoolMetrics poolMetrics = poolMetricsOptional.get();
System.out.println("Execute query: connection Pool Metrics: Acquired: "
log.debug("Execute query: connection Pool Metrics: Acquired: "
+ poolMetrics.acquiredSize() + ", Pending: "
+ poolMetrics.pendingAcquireSize() + ", Allocated: "
+ poolMetrics.allocatedSize() + ", idle: " + poolMetrics.idleSize()
@ -397,16 +391,15 @@ public class MySqlPlugin extends BasePlugin {
return resultMono
.map(res -> {
ActionExecutionResult result = new ActionExecutionResult();
System.out.println(
Thread.currentThread().getName()
+ ": objectMapper.valueToTree from MySQL plugin.");
log.debug(Thread.currentThread().getName()
+ ": objectMapper.valueToTree from MySQL plugin.");
Stopwatch processStopwatch =
new Stopwatch("MySQL Plugin objectMapper valueToTree");
result.setBody(objectMapper.valueToTree(rowsList));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
result.setMessages(populateHintMessages(columnsList));
result.setIsExecutionSuccess(true);
System.out.println("In the MySqlPlugin, got action execution result");
log.debug("In the MySqlPlugin, got action execution result");
return result;
})
.onErrorResume(error -> {
@ -446,7 +439,7 @@ public class MySqlPlugin extends BasePlugin {
})
// Now set the request in the result to be returned to the server
.map(actionExecutionResult -> {
System.out.println(
log.debug(
Thread.currentThread().getName()
+ ": setting the request in actionExecutionResult from MySQL plugin.");
ActionExecutionRequest request = new ActionExecutionRequest();
@ -491,7 +484,7 @@ public class MySqlPlugin extends BasePlugin {
return Flux.from(connectionStatement.execute());
}
System.out.println("Query : " + query);
log.debug("Query : " + query);
List<Map.Entry<String, String>> parameters = new ArrayList<>();
try {
@ -516,8 +509,7 @@ public class MySqlPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(ConnectionContext<ConnectionPool> connectionContext) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for MySQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for MySQL plugin.");
ConnectionPool pool = connectionContext.getConnection();
return Mono.just(pool)
.flatMap(p -> p.create())
@ -670,8 +662,7 @@ public class MySqlPlugin extends BasePlugin {
@Override
public Mono<ConnectionContext<ConnectionPool>> datasourceCreate(
DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for MySQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for MySQL plugin.");
return Mono.just(datasourceConfiguration).flatMap(ignore -> {
ConnectionContext<ConnectionPool> connectionContext;
try {
@ -688,8 +679,7 @@ public class MySqlPlugin extends BasePlugin {
@Override
public void datasourceDestroy(ConnectionContext<ConnectionPool> connectionContext) {
String printMessage = Thread.currentThread().getName() + ": datasourceDestroy() called for MySQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceDestroy() called for MySQL plugin.");
Mono.just(connectionContext)
.flatMap(ignore -> {
SSHTunnelContext sshTunnelContext = connectionContext.getSshTunnelContext();
@ -704,7 +694,7 @@ public class MySqlPlugin extends BasePlugin {
sshTunnelContext.getSshClient().disconnect();
sshTunnelContext.getThread().stop();
} catch (IOException e) {
System.out.println("Failed to destroy SSH tunnel context: " + e.getMessage());
log.error("Failed to destroy SSH tunnel context: " + e.getMessage());
}
}
@ -722,8 +712,7 @@ public class MySqlPlugin extends BasePlugin {
connectionPool
.disposeLater()
.onErrorResume(exception -> {
System.out.println("Could not destroy MySQL connection pool");
exception.printStackTrace();
log.debug("Could not destroy MySQL connection pool", exception);
return Mono.empty();
})
.subscribeOn(scheduler)
@ -733,16 +722,14 @@ public class MySqlPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
    // Log once via SLF4J; the duplicate System.out.println and unused `printMessage`
    // local are removed. Parameterized form defers string construction to the logger.
    log.debug("{}: validateDatasource() called for MySQL plugin.", Thread.currentThread().getName());
    // Validation rules (endpoints, auth, SSL settings) live in the shared utility class;
    // an empty set means the configuration is valid.
    return MySqlDatasourceUtils.validateDatasource(datasourceConfiguration);
}
@Override
public Mono<DatasourceStructure> getStructure(
ConnectionContext<ConnectionPool> connectionContext, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for MySQL plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for MySQL plugin.");
final DatasourceStructure structure = new DatasourceStructure();
final Map<String, DatasourceStructure.Table> tablesByName = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
final Map<String, DatasourceStructure.Key> keyRegistry = new HashMap<>();
@ -764,7 +751,7 @@ public class MySqlPlugin extends BasePlugin {
Optional<PoolMetrics> poolMetricsOptional = connectionPool.getMetrics();
if (poolMetricsOptional.isPresent()) {
PoolMetrics poolMetrics = poolMetricsOptional.get();
System.out.println("Get structure: connection Pool Metrics: Acquired: "
log.debug("Get structure: connection Pool Metrics: Acquired: "
+ poolMetrics.acquiredSize() + ", Pending: "
+ poolMetrics.pendingAcquireSize() + ", Allocated: "
+ poolMetrics.allocatedSize() + ", idle: " + poolMetrics.idleSize()
@ -801,7 +788,7 @@ public class MySqlPlugin extends BasePlugin {
})
.collectList()
.map(list -> {
System.out.println(
log.debug(
Thread.currentThread().getName() + ": getTemplates from MySQL plugin.");
/* Get templates for each table and put those in. */
getTemplates(tablesByName);

View File

@ -75,9 +75,7 @@ public class OpenAiPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for OpenAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for OpenAI plugin.");
// Get prompt from action configuration
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -95,8 +93,7 @@ public class OpenAiPlugin extends BasePlugin {
ActionConfiguration actionConfiguration,
List<Map.Entry<String, String>> insertedParams) {
String printMessage = Thread.currentThread().getName() + ": executeCommon() called for OpenAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for OpenAI plugin.");
// Initializing object for error condition
ActionExecutionResult errorResult = new ActionExecutionResult();
initUtils.initializeResponseWithError(errorResult);
@ -205,8 +202,7 @@ public class OpenAiPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
    // Single SLF4J debug line; the stale System.out.println duplicate and its dead
    // `printMessage` variable are dropped. Parameterized logging skips message
    // construction entirely when DEBUG is off.
    log.debug("{}: validateDatasource() called for OpenAI plugin.", Thread.currentThread().getName());
    // OpenAI datasources authenticate with a bearer token; delegate the check.
    return RequestUtils.validateBearerTokenDatasource(datasourceConfiguration);
}
@ -214,8 +210,7 @@ public class OpenAiPlugin extends BasePlugin {
public Mono<TriggerResultDTO> trigger(
APIConnection connection, DatasourceConfiguration datasourceConfiguration, TriggerRequestDTO request) {
String printMessage = Thread.currentThread().getName() + ": trigger() called for OpenAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": trigger() called for OpenAI plugin.");
// Authentication will already be valid at this point
final BearerTokenAuth bearerTokenAuth = (BearerTokenAuth) datasourceConfiguration.getAuthentication();
assert (bearerTokenAuth.getBearerToken() != null);
@ -285,8 +280,7 @@ public class OpenAiPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for OpenAI plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for OpenAI plugin.");
final BearerTokenAuth bearerTokenAuth = (BearerTokenAuth) datasourceConfiguration.getAuthentication();
HttpMethod httpMethod = HttpMethod.GET;

View File

@ -94,8 +94,7 @@ public class OraclePlugin extends BasePlugin {
@Override
public Mono<HikariDataSource> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for Oracle plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for Oracle plugin.");
try {
Class.forName(JDBC_DRIVER);
} catch (ClassNotFoundException e) {
@ -106,7 +105,7 @@ public class OraclePlugin extends BasePlugin {
}
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName() + ": Connecting to Oracle db");
log.debug(Thread.currentThread().getName() + ": Connecting to Oracle db");
return createConnectionPool(datasourceConfiguration);
})
.subscribeOn(scheduler);
@ -119,8 +118,7 @@ public class OraclePlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
    // Remove the duplicate System.out.println and unused `printMessage` local left over
    // from the sysout-to-SLF4J migration; log with lazy parameterized formatting instead.
    log.debug("{}: validateDatasource() called for Oracle plugin.", Thread.currentThread().getName());
    // All Oracle-specific validation (endpoints, credentials, SSL) is centralized in the utils.
    return OracleDatasourceUtils.validateDatasource(datasourceConfiguration);
}
@ -140,9 +138,7 @@ public class OraclePlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for Oracle plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for Oracle plugin.");
final Map<String, Object> formData = actionConfiguration.getFormData();
String query = getDataValueSafelyFromFormData(formData, BODY, STRING_TYPE, null);
// Check for query parameter before performing the probably expensive fetch connection from the pool op.
@ -201,8 +197,7 @@ public class OraclePlugin extends BasePlugin {
List<MustacheBindingToken> mustacheValuesInOrder,
ExecuteActionDTO executeActionDTO) {
String printMessage = Thread.currentThread().getName() + ": executeCommon() called for Oracle plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeCommon() called for Oracle plugin.");
final Map<String, Object> requestData = new HashMap<>();
requestData.put("preparedStatement", TRUE.equals(preparedStatement) ? true : false);
@ -231,8 +226,7 @@ public class OraclePlugin extends BasePlugin {
// library throws SQLException in case the pool is closed or there is an issue initializing
// the connection pool which can also be translated in our world to StaleConnectionException
// and should then trigger the destruction and recreation of the pool.
System.out.println(
"Exception Occurred while getting connection from pool" + e.getMessage());
log.error("Exception Occurred while getting connection from pool" + e.getMessage());
e.printStackTrace(System.out);
return Mono.error(
e instanceof StaleConnectionException
@ -285,9 +279,9 @@ public class OraclePlugin extends BasePlugin {
statement,
preparedQuery);
} catch (SQLException e) {
System.out.println(Thread.currentThread().getName()
log.error(Thread.currentThread().getName()
+ ": In the OraclePlugin, got action execution error");
System.out.println(e.getMessage());
log.error(e.getMessage());
return Mono.error(new AppsmithPluginException(
OraclePluginError.QUERY_EXECUTION_FAILED,
OracleErrorMessages.QUERY_EXECUTION_FAILED_ERROR_MSG,
@ -305,7 +299,7 @@ public class OraclePlugin extends BasePlugin {
result.setBody(objectMapper.valueToTree(rowsList));
result.setMessages(populateHintMessages(columnsList));
result.setIsExecutionSuccess(true);
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": In the OraclePlugin, got action execution result");
return Mono.just(result);
})
@ -336,8 +330,7 @@ public class OraclePlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
        HikariDataSource connectionPool, DatasourceConfiguration datasourceConfiguration) {
    // Drop the redundant System.out.println and its dead `printMessage` local; keep one
    // parameterized SLF4J debug line (no string concatenation unless DEBUG is enabled).
    log.debug("{}: getStructure() called for Oracle plugin.", Thread.currentThread().getName());
    // Schema introspection is delegated to the shared Oracle datasource utilities.
    return OracleDatasourceUtils.getStructure(connectionPool, datasourceConfiguration);
}
@ -446,9 +439,8 @@ public class OraclePlugin extends BasePlugin {
@Override
public Mono<String> getEndpointIdentifierForRateLimit(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for Oracle plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for Oracle plugin.");
List<Endpoint> endpoints = datasourceConfiguration.getEndpoints();
String identifier = "";
// When hostname and port both are available, both will be used as identifier

View File

@ -53,12 +53,18 @@
<artifactId>pf4j-spring</artifactId>
<version>0.8.0</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-reload4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>provided</scope>
</dependency>
<dependency>

View File

@ -242,9 +242,7 @@ public class PostgresPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": executeParameterized() called for Postgres plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": executeParameterized() called for Postgres plugin.");
String query = actionConfiguration.getBody();
// Check for query parameter before performing the probably expensive fetch
// connection from the pool op.
@ -299,9 +297,8 @@ public class PostgresPlugin extends BasePlugin {
@Override
public ActionConfiguration getSchemaPreviewActionConfig(Template queryTemplate, Boolean isMock) {
String printMessage =
Thread.currentThread().getName() + ": getSchemaPreviewActionConfig() called for Postgres plugin.";
System.out.println(printMessage);
log.debug(
Thread.currentThread().getName() + ": getSchemaPreviewActionConfig() called for Postgres plugin.");
ActionConfiguration actionConfig = new ActionConfiguration();
// Sets query body
actionConfig.setBody(queryTemplate.getBody());
@ -317,9 +314,8 @@ public class PostgresPlugin extends BasePlugin {
@Override
public Mono<String> getEndpointIdentifierForRateLimit(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for Postgres plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for Postgres plugin.");
List<Endpoint> endpoints = datasourceConfiguration.getEndpoints();
SSHConnection sshProxy = datasourceConfiguration.getSshProxy();
String identifier = "";
@ -362,7 +358,7 @@ public class PostgresPlugin extends BasePlugin {
Instant requestedAt = Instant.now();
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": Within the executeCommon method of PostgresPluginExecutor.");
Connection connectionFromPool;
@ -397,7 +393,7 @@ public class PostgresPlugin extends BasePlugin {
int activeConnections = poolProxy.getActiveConnections();
int totalConnections = poolProxy.getTotalConnections();
int threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(String.format(
log.debug(String.format(
"Before executing postgres query [%s] Hikari Pool stats: active - %d, idle - %d, awaiting - %d, total - %d",
query,
activeConnections,
@ -456,7 +452,7 @@ public class PostgresPlugin extends BasePlugin {
int objectSize = sizeof(rowsList);
if (objectSize > MAX_SIZE_SUPPORTED) {
System.out.println(String.format(
log.debug(String.format(
"[PostgresPlugin] Result size greater than maximum supported size of %d bytes. Current size: %d",
MAX_SIZE_SUPPORTED, objectSize));
return Mono.error(new AppsmithPluginException(
@ -502,13 +498,12 @@ public class PostgresPlugin extends BasePlugin {
} else if (JSON_TYPE_NAME.equalsIgnoreCase(typeName)
|| JSONB_TYPE_NAME.equalsIgnoreCase(typeName)) {
System.out.println(
Thread.currentThread().getName()
+ ": objectMapper readTree for Postgres plugin.");
log.debug(Thread.currentThread().getName()
+ ": objectMapper readTree for Postgres plugin.");
Stopwatch processStopwatch =
new Stopwatch("Postgres Plugin objectMapper readTree");
value = objectMapper.readTree(resultSet.getString(i));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
} else {
value = resultSet.getObject(i);
@ -537,7 +532,7 @@ public class PostgresPlugin extends BasePlugin {
}
} catch (SQLException e) {
System.out.println("In the PostgresPlugin, got action execution error");
log.debug("In the PostgresPlugin, got action execution error");
return Mono.error(new AppsmithPluginException(
PostgresPluginError.QUERY_EXECUTION_FAILED,
PostgresErrorMessages.QUERY_EXECUTION_FAILED_ERROR_MSG,
@ -547,7 +542,7 @@ public class PostgresPlugin extends BasePlugin {
// Since postgres json type field can only hold valid json data, this exception
// is not expected
// to occur.
System.out.println("In the PostgresPlugin, got action execution error");
log.error("In the PostgresPlugin, got action execution error");
return Mono.error(new AppsmithPluginException(
PostgresPluginError.QUERY_EXECUTION_FAILED,
PostgresErrorMessages.QUERY_EXECUTION_FAILED_ERROR_MSG,
@ -557,14 +552,14 @@ public class PostgresPlugin extends BasePlugin {
activeConnections = poolProxy.getActiveConnections();
totalConnections = poolProxy.getTotalConnections();
threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(String.format(
log.debug(String.format(
"After executing postgres query, Hikari Pool stats active - %d, idle - %d, awaiting - %d, total - %d",
activeConnections, idleConnections, threadsAwaitingConnection, totalConnections));
if (resultSet != null) {
try {
resultSet.close();
} catch (SQLException e) {
System.out.println("Execute Error closing Postgres ResultSet");
log.error("Execute Error closing Postgres ResultSet");
e.printStackTrace();
}
}
@ -573,7 +568,7 @@ public class PostgresPlugin extends BasePlugin {
try {
statement.close();
} catch (SQLException e) {
System.out.println("Execute Error closing Postgres Statement");
log.error("Execute Error closing Postgres Statement");
e.printStackTrace();
}
}
@ -582,7 +577,7 @@ public class PostgresPlugin extends BasePlugin {
try {
preparedQuery.close();
} catch (SQLException e) {
System.out.println("Execute Error closing Postgres Statement");
log.error("Execute Error closing Postgres Statement");
e.printStackTrace();
}
}
@ -592,21 +587,20 @@ public class PostgresPlugin extends BasePlugin {
// Return the connection back to the pool
connectionFromPool.close();
} catch (SQLException e) {
System.out.println("Execute Error returning Postgres connection to pool");
log.error("Execute Error returning Postgres connection to pool");
e.printStackTrace();
}
}
}
ActionExecutionResult result = new ActionExecutionResult();
System.out.println(
Thread.currentThread().getName() + ": objectMapper valueToTree for Postgres plugin.");
log.debug(Thread.currentThread().getName() + ": objectMapper valueToTree for Postgres plugin.");
Stopwatch processStopwatch = new Stopwatch("Postgres Plugin objectMapper valueToTree");
result.setBody(objectMapper.valueToTree(rowsList));
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
result.setMessages(populateHintMessages(columnsList));
result.setIsExecutionSuccess(true);
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": In the PostgresPlugin, got action execution result");
return Mono.just(result);
})
@ -670,8 +664,7 @@ public class PostgresPlugin extends BasePlugin {
@Override
public Mono<HikariDataSource> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for Postgres plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for Postgres plugin.");
try {
Class.forName(JDBC_DRIVER);
} catch (ClassNotFoundException e) {
@ -682,7 +675,7 @@ public class PostgresPlugin extends BasePlugin {
}
return connectionPoolConfig.getMaxConnectionPoolSize().flatMap(maxPoolSize -> Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName() + ": Connecting to Postgres db");
log.debug(Thread.currentThread().getName() + ": Connecting to Postgres db");
return createConnectionPool(datasourceConfiguration, maxPoolSize);
})
.subscribeOn(scheduler));
@ -697,9 +690,7 @@ public class PostgresPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for Postgres plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for Postgres plugin.");
Set<String> invalids = new HashSet<>();
if (CollectionUtils.isEmpty(datasourceConfiguration.getEndpoints())) {
@ -782,8 +773,7 @@ public class PostgresPlugin extends BasePlugin {
public Mono<DatasourceStructure> getStructure(
HikariDataSource connection, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for Postgres plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for Postgres plugin.");
final DatasourceStructure structure = new DatasourceStructure();
final Map<String, DatasourceStructure.Table> tablesByName = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
@ -812,7 +802,7 @@ public class PostgresPlugin extends BasePlugin {
int activeConnections = poolProxy.getActiveConnections();
int totalConnections = poolProxy.getTotalConnections();
int threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(String.format(
log.debug(String.format(
"Before getting postgres db structure Hikari Pool stats active - %d, idle - %d, awaiting - %d, total - %d",
activeConnections, idleConnections, threadsAwaitingConnection, totalConnections));
@ -996,7 +986,7 @@ public class PostgresPlugin extends BasePlugin {
activeConnections = poolProxy.getActiveConnections();
totalConnections = poolProxy.getTotalConnections();
threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(String.format(
log.debug(String.format(
"After postgres db structure, Hikari Pool stats active - %d, idle - %d, awaiting - %d, total - %d",
activeConnections, idleConnections, threadsAwaitingConnection, totalConnections));
@ -1005,8 +995,7 @@ public class PostgresPlugin extends BasePlugin {
// Return the connection back to the pool
connectionFromPool.close();
} catch (SQLException e) {
System.out.println(
"Error returning Postgres connection to pool during get structure");
log.error("Error returning Postgres connection to pool during get structure");
e.printStackTrace();
}
}
@ -1016,7 +1005,7 @@ public class PostgresPlugin extends BasePlugin {
for (DatasourceStructure.Table table : structure.getTables()) {
table.getKeys().sort(Comparator.naturalOrder());
}
System.out.println(Thread.currentThread().getName() + ": Got the structure of postgres db");
log.debug(Thread.currentThread().getName() + ": Got the structure of postgres db");
return structure;
})
.map(resultStructure -> (DatasourceStructure) resultStructure)
@ -1097,12 +1086,12 @@ public class PostgresPlugin extends BasePlugin {
preparedStatement.setArray(index, null);
break;
case ARRAY: {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper readValue for Postgres plugin ARRAY class");
Stopwatch processStopwatch =
new Stopwatch("Postgres Plugin objectMapper readValue for ARRAY class");
List arrayListFromInput = objectMapper.readValue(value, List.class);
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
if (arrayListFromInput.isEmpty()) {
break;
}

View File

@ -68,8 +68,7 @@ public class RedisPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for Redis plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for Redis plugin.");
String query = actionConfiguration.getBody();
List<RequestParamDTO> requestParams =
List.of(new RequestParamDTO(ACTION_CONFIGURATION_BODY, query, null, null, null));
@ -121,7 +120,7 @@ public class RedisPlugin extends BasePlugin {
objectMapper.valueToTree(removeQuotes(processCommandOutput(commandOutput))));
actionExecutionResult.setIsExecutionSuccess(true);
System.out.println(
log.debug(
Thread.currentThread().getName() + ": In the RedisPlugin, got action execution result");
return Mono.just(actionExecutionResult);
@ -256,15 +255,14 @@ public class RedisPlugin extends BasePlugin {
@Override
public Mono<JedisPool> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for Redis plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for Redis plugin.");
return Mono.fromCallable(() -> {
final JedisPoolConfig poolConfig = buildPoolConfig();
int timeout =
(int) Duration.ofSeconds(CONNECTION_TIMEOUT).toMillis();
URI uri = RedisURIUtils.getURI(datasourceConfiguration);
JedisPool jedisPool = new JedisPool(poolConfig, uri, timeout);
System.out.println(Thread.currentThread().getName() + ": Created Jedis pool.");
log.debug(Thread.currentThread().getName() + ": Created Jedis pool.");
return jedisPool;
})
.subscribeOn(scheduler);
@ -272,8 +270,7 @@ public class RedisPlugin extends BasePlugin {
@Override
public void datasourceDestroy(JedisPool jedisPool) {
String printMessage = Thread.currentThread().getName() + ": datasourceDestroy() called for Redis plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceDestroy() called for Redis plugin.");
// Schedule on elastic thread pool and subscribe immediately.
Mono.fromSupplier(() -> {
try {
@ -281,7 +278,7 @@ public class RedisPlugin extends BasePlugin {
jedisPool.destroy();
}
} catch (JedisException e) {
System.out.println("Error destroying Jedis pool.");
log.error("Error destroying Jedis pool.");
}
return Mono.empty();
@ -292,8 +289,7 @@ public class RedisPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": validateDatasource() called for Redis plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for Redis plugin.");
Set<String> invalids = new HashSet<>();
if (isEndpointMissing(datasourceConfiguration.getEndpoints())) {
@ -310,9 +306,8 @@ public class RedisPlugin extends BasePlugin {
@Override
public Mono<String> getEndpointIdentifierForRateLimit(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": getEndpointIdentifierForRateLimit() called for Redis plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for Redis plugin.");
List<Endpoint> endpoints = datasourceConfiguration.getEndpoints();
String identifier = "";
// When hostname and port both are available, both will be used as identifier
@ -378,8 +373,7 @@ public class RedisPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(JedisPool connectionPool) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for Redis plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for Redis plugin.");
return Mono.just(connectionPool)
.flatMap(c -> verifyPing(connectionPool))

View File

@ -131,7 +131,7 @@ public class RedshiftPlugin extends BasePlugin {
private void checkResultSetValidity(ResultSet resultSet) throws AppsmithPluginException {
if (resultSet == null) {
System.out.println("Redshift plugin: getRow: driver failed to fetch result: resultSet is null.");
log.debug("Redshift plugin: getRow: driver failed to fetch result: resultSet is null.");
throw new AppsmithPluginException(
RedshiftPluginError.QUERY_EXECUTION_FAILED, RedshiftErrorMessages.NULL_RESULTSET_ERROR_MSG);
}
@ -147,7 +147,7 @@ public class RedshiftPlugin extends BasePlugin {
* ResultSetMetaData.
*/
if (metaData == null) {
System.out.println("Redshift plugin: getRow: metaData is null. Ideally this is never supposed to "
log.debug("Redshift plugin: getRow: metaData is null. Ideally this is never supposed to "
+ "happen as the Redshift JDBC driver does a null check before passing this object. This means "
+ "that something has gone wrong while processing the query result.");
throw new AppsmithPluginException(
@ -195,8 +195,7 @@ public class RedshiftPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for Redshift plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for Redshift plugin.");
String query = actionConfiguration.getBody();
List<RequestParamDTO> requestParams =
List.of(new RequestParamDTO(ACTION_CONFIGURATION_BODY, query, null, null, null));
@ -273,7 +272,7 @@ public class RedshiftPlugin extends BasePlugin {
try {
resultSet.close();
} catch (SQLException e) {
System.out.println("Error closing Redshift ResultSet");
log.error("Error closing Redshift ResultSet");
e.printStackTrace();
}
}
@ -282,7 +281,7 @@ public class RedshiftPlugin extends BasePlugin {
try {
statement.close();
} catch (SQLException e) {
System.out.println("Error closing Redshift Statement");
log.error("Error closing Redshift Statement");
e.printStackTrace();
}
}
@ -290,7 +289,7 @@ public class RedshiftPlugin extends BasePlugin {
try {
connection.close();
} catch (SQLException e) {
System.out.println("Error closing Redshift Connection");
log.error("Error closing Redshift Connection");
e.printStackTrace();
}
}
@ -299,7 +298,7 @@ public class RedshiftPlugin extends BasePlugin {
result.setBody(objectMapper.valueToTree(rowsList));
result.setMessages(populateHintMessages(columnsList));
result.setIsExecutionSuccess(true);
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": In the RedshiftPlugin, got action execution result");
return Mono.just(result);
})
@ -333,15 +332,13 @@ public class RedshiftPlugin extends BasePlugin {
}
public void printConnectionPoolStatus(HikariDataSource connectionPool, boolean isFetchingStructure) {
String printMessage =
Thread.currentThread().getName() + ": printConnectionPoolStatus() called for Redshift plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": printConnectionPoolStatus() called for Redshift plugin.");
HikariPoolMXBean poolProxy = connectionPool.getHikariPoolMXBean();
int idleConnections = poolProxy.getIdleConnections();
int activeConnections = poolProxy.getActiveConnections();
int totalConnections = poolProxy.getTotalConnections();
int threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ (isFetchingStructure
? " Before fetching Redshift db structure."
: " Before executing Redshift query.")
@ -372,8 +369,7 @@ public class RedshiftPlugin extends BasePlugin {
@Override
public Mono<HikariDataSource> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for Redshift plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for Redshift plugin.");
try {
Class.forName(JDBC_DRIVER);
} catch (ClassNotFoundException e) {
@ -384,7 +380,7 @@ public class RedshiftPlugin extends BasePlugin {
}
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName() + ": Connecting to Redshift db");
log.debug(Thread.currentThread().getName() + ": Connecting to Redshift db");
return createConnectionPool(datasourceConfiguration);
})
.subscribeOn(scheduler);
@ -392,9 +388,7 @@ public class RedshiftPlugin extends BasePlugin {
@Override
public void datasourceDestroy(HikariDataSource connectionPool) {
String printMessage =
Thread.currentThread().getName() + ": datasourceDestroy() called for Redshift plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceDestroy() called for Redshift plugin.");
if (connectionPool != null) {
connectionPool.close();
}
@ -402,9 +396,7 @@ public class RedshiftPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(@NonNull DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for Redshift plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for Redshift plugin.");
Set<String> invalids = new HashSet<>();
if (CollectionUtils.isEmpty(datasourceConfiguration.getEndpoints())) {
@ -449,9 +441,8 @@ public class RedshiftPlugin extends BasePlugin {
@Override
public Mono<String> getEndpointIdentifierForRateLimit(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for Redshift plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": getEndpointIdentifierForRateLimit() called for Redshift plugin.");
List<Endpoint> endpoints = datasourceConfiguration.getEndpoints();
String identifier = "";
// When hostname and port both are available, both will be used as identifier
@ -625,8 +616,7 @@ public class RedshiftPlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
HikariDataSource connectionPool, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for Redshift plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for Redshift plugin.");
final DatasourceStructure structure = new DatasourceStructure();
final Map<String, DatasourceStructure.Table> tablesByName = new LinkedHashMap<>();
final Map<String, DatasourceStructure.Key> keyRegistry = new HashMap<>();
@ -665,7 +655,7 @@ public class RedshiftPlugin extends BasePlugin {
// Ref:
// <https://docs.oracle.com/en/java/javase/11/docs/api/java.sql/java/sql/DatabaseMetaData.html>.
System.out.println(Thread.currentThread().getName() + ": Getting Redshift Db structure");
log.debug(Thread.currentThread().getName() + ": Getting Redshift Db structure");
try (Statement statement = connection.createStatement()) {
// Get tables' schema and fill up their columns.
@ -697,7 +687,7 @@ public class RedshiftPlugin extends BasePlugin {
try {
connection.close();
} catch (SQLException e) {
System.out.println("Error closing Redshift Connection");
log.error("Error closing Redshift Connection");
e.printStackTrace();
}
}

View File

@ -72,9 +72,8 @@ public class RestApiPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName()
+ ": executeParameterized() called for RestAPI plugin. Executing the API call.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": executeParameterized() called for RestAPI plugin. Executing the API call.");
final List<Property> properties = actionConfiguration.getPluginSpecifiedTemplates();
List<Map.Entry<String, String>> parameters = new ArrayList<>();
@ -134,9 +133,8 @@ public class RestApiPlugin extends BasePlugin {
ActionConfiguration actionConfiguration,
List<Map.Entry<String, String>> insertedParams) {
String printMessage = Thread.currentThread().getName()
+ ": executeCommon() called for RestAPI plugin. Executing the API call.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": executeCommon() called for RestAPI plugin. Executing the API call.");
// Initializing object for error condition
ActionExecutionResult errorResult = new ActionExecutionResult();
initUtils.initializeResponseWithError(errorResult);
@ -215,7 +213,7 @@ public class RestApiPlugin extends BasePlugin {
errorResult.setRequest(requestCaptureFilter.populateRequestFields(
actionExecutionRequest, isBodySentWithApiRequest, datasourceConfiguration));
errorResult.setIsExecutionSuccess(false);
System.out.println(String.format(
log.debug(String.format(
"An error has occurred while trying to run the API query for url: %s, path: %s",
datasourceConfiguration.getUrl(), actionConfiguration.getPath()));
error.printStackTrace();

View File

@ -23,6 +23,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.pf4j.Extension;
import org.pf4j.PluginWrapper;
import org.springframework.http.HttpMethod;
@ -48,6 +49,7 @@ import java.util.Set;
import static org.springframework.http.HttpHeaders.CONTENT_TYPE;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
@Slf4j
public class SaasPlugin extends BasePlugin {
private static final int MAX_REDIRECTS = 5;
@ -85,8 +87,7 @@ public class SaasPlugin extends BasePlugin {
ExecutePluginDTO connection,
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for Saas plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for Saas plugin.");
// Initializing object for error condition
ActionExecutionResult errorResult = new ActionExecutionResult();
@ -135,12 +136,11 @@ public class SaasPlugin extends BasePlugin {
String valueAsString = "";
try {
System.out.println(
Thread.currentThread().getName() + ": objectMapper writing value as string for Saas plugin.");
log.debug(Thread.currentThread().getName() + ": objectMapper writing value as string for Saas plugin.");
Stopwatch processStopwatch =
new Stopwatch("SaaS Plugin objectMapper writing value as string for connection");
valueAsString = saasObjectMapper.writeValueAsString(connection);
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
} catch (JsonProcessingException e) {
e.printStackTrace();
}
@ -153,13 +153,13 @@ public class SaasPlugin extends BasePlugin {
byte[] body = stringResponseEntity.getBody();
if (statusCode.is2xxSuccessful()) {
try {
System.out.println(Thread.currentThread().getName()
log.debug(Thread.currentThread().getName()
+ ": objectMapper reading value as string for Saas plugin.");
Stopwatch processStopwatch =
new Stopwatch("SaaS Plugin objectMapper reading value as string for body");
ActionExecutionResult result =
saasObjectMapper.readValue(body, ActionExecutionResult.class);
processStopwatch.stopAndLogTimeInMillisWithSysOut();
processStopwatch.stopAndLogTimeInMillis();
return result;
} catch (IOException e) {
throw Exceptions.propagate(new AppsmithPluginException(

View File

@ -71,8 +71,7 @@ public class SmtpPlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for SMTP plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for SMTP plugin.");
MimeMessage message = getMimeMessage(connection);
ActionExecutionResult result = new ActionExecutionResult();
try {
@ -166,7 +165,7 @@ public class SmtpPlugin extends BasePlugin {
}
// Send the email now
System.out.println("Going to send the email");
log.debug("Going to send the email");
Transport.send(message);
result.setIsExecutionSuccess(true);
@ -174,7 +173,7 @@ public class SmtpPlugin extends BasePlugin {
responseBody.put("message", "Sent the email successfully");
result.setBody(objectMapper.valueToTree(responseBody));
System.out.println("Sent the email successfully");
log.debug("Sent the email successfully");
} catch (MessagingException e) {
return Mono.error(new AppsmithPluginException(
SMTPPluginError.MAIL_SENDING_FAILED,
@ -200,8 +199,7 @@ public class SmtpPlugin extends BasePlugin {
@Override
public Mono<Session> datasourceCreate(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": datasourceCreate() called for SMTP plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceCreate() called for SMTP plugin.");
Endpoint endpoint = datasourceConfiguration.getEndpoints().get(0);
DBAuth authentication = (DBAuth) datasourceConfiguration.getAuthentication();
@ -228,8 +226,7 @@ public class SmtpPlugin extends BasePlugin {
@Override
public void datasourceDestroy(Session session) {
String printMessage = Thread.currentThread().getName() + ": datasourceDestroy() called for SMTP plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": datasourceDestroy() called for SMTP plugin.");
try {
if (session != null && session.getTransport() != null) {
session.getTransport().close();
@ -241,8 +238,7 @@ public class SmtpPlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": validateDatasource() called for SMTP plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for SMTP plugin.");
Set<String> invalids = new HashSet<>();
if (CollectionUtils.isEmpty(datasourceConfiguration.getEndpoints())) {
invalids.add(SMTPErrorMessages.DS_MISSING_HOST_ADDRESS_ERROR_MSG);
@ -265,8 +261,7 @@ public class SmtpPlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(Session connection) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for SMTP plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for SMTP plugin.");
return Mono.fromCallable(() -> {
Set<String> invalids = new HashSet<>();
try {
@ -280,7 +275,7 @@ public class SmtpPlugin extends BasePlugin {
} catch (AuthenticationFailedException e) {
invalids.add(SMTPErrorMessages.DS_AUTHENTICATION_FAILED_ERROR_MSG);
} catch (MessagingException e) {
System.out.println(e.getMessage());
log.error(e.getMessage());
invalids.add(SMTPErrorMessages.DS_CONNECTION_FAILED_TO_SMTP_SERVER_ERROR_MSG);
}
return invalids;
@ -290,9 +285,8 @@ public class SmtpPlugin extends BasePlugin {
@Override
public Mono<String> getEndpointIdentifierForRateLimit(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": getEndpointIdentifierForRateLimit() called for SMTP plugin.";
System.out.println(printMessage);
log.debug(
Thread.currentThread().getName() + ": getEndpointIdentifierForRateLimit() called for SMTP plugin.");
List<Endpoint> endpoints = datasourceConfiguration.getEndpoints();
String identifier = "";
// When hostname and port both are available, both will be used as identifier

View File

@ -71,8 +71,7 @@ public class SnowflakePlugin extends BasePlugin {
DatasourceConfiguration datasourceConfiguration,
ActionConfiguration actionConfiguration) {
String printMessage = Thread.currentThread().getName() + ": execute() called for Snowflake plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": execute() called for Snowflake plugin.");
String query = actionConfiguration.getBody();
if (!StringUtils.hasLength(query)) {
@ -82,7 +81,7 @@ public class SnowflakePlugin extends BasePlugin {
}
return Mono.fromCallable(() -> {
System.out.println(Thread.currentThread().getName() + ": Execute Snowflake Query");
log.debug(Thread.currentThread().getName() + ": Execute Snowflake Query");
Connection connectionFromPool;
try {
@ -107,7 +106,7 @@ public class SnowflakePlugin extends BasePlugin {
int activeConnections = poolProxy.getActiveConnections();
int totalConnections = poolProxy.getTotalConnections();
int threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(String.format(
log.debug(String.format(
"Before executing snowflake query [%s] Hikari Pool stats: active - %d, idle - %d, awaiting - %d, total - %d",
query,
activeConnections,
@ -126,7 +125,7 @@ public class SnowflakePlugin extends BasePlugin {
activeConnections = poolProxy.getActiveConnections();
totalConnections = poolProxy.getTotalConnections();
threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(String.format(
log.debug(String.format(
"After executing snowflake query, Hikari Pool stats active - %d, idle - %d, awaiting - %d, total - %d",
activeConnections, idleConnections, threadsAwaitingConnection, totalConnections));
@ -135,7 +134,7 @@ public class SnowflakePlugin extends BasePlugin {
// Return the connection back to the pool
connectionFromPool.close();
} catch (SQLException e) {
System.out.println("Execute Error returning Snowflake connection to pool");
log.error("Execute Error returning Snowflake connection to pool");
e.printStackTrace();
}
}
@ -156,13 +155,10 @@ public class SnowflakePlugin extends BasePlugin {
@Override
public Mono<HikariDataSource> createConnectionClient(
DatasourceConfiguration datasourceConfiguration, Properties properties) {
String printMessage =
Thread.currentThread().getName() + ": createConnectionClient() called for Snowflake plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": createConnectionClient() called for Snowflake plugin.");
return getHikariConfig(datasourceConfiguration, properties)
.flatMap(config -> Mono.fromCallable(() -> {
System.out.println(
Thread.currentThread().getName() + ": creating Snowflake connection client");
log.debug(Thread.currentThread().getName() + ": creating Snowflake connection client");
// Set up the connection URL
String jdbcUrl = getJDBCUrl(datasourceConfiguration);
config.setJdbcUrl(jdbcUrl);
@ -202,9 +198,8 @@ public class SnowflakePlugin extends BasePlugin {
@Override
public Properties addAuthParamsToConnectionConfig(
DatasourceConfiguration datasourceConfiguration, Properties properties) {
String printMessage = Thread.currentThread().getName()
+ ": addAuthParamsToConnectionConfig() called for Snowflake plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName()
+ ": addAuthParamsToConnectionConfig() called for Snowflake plugin.");
// Only for username password auth, we need to set these properties, for others
// like key-pair auth, authentication specific properties need to be set on config itself
AuthenticationDTO authentication = datasourceConfiguration.getAuthentication();
@ -244,9 +239,7 @@ public class SnowflakePlugin extends BasePlugin {
@Override
public Set<String> validateDatasource(DatasourceConfiguration datasourceConfiguration) {
String printMessage =
Thread.currentThread().getName() + ": validateDatasource() called for Snowflake plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": validateDatasource() called for Snowflake plugin.");
Set<String> invalids = new HashSet<>();
if (StringUtils.isEmpty(datasourceConfiguration.getUrl())) {
@ -315,11 +308,10 @@ public class SnowflakePlugin extends BasePlugin {
@Override
public Mono<DatasourceTestResult> testDatasource(HikariDataSource connection) {
String printMessage = Thread.currentThread().getName() + ": testDatasource() called for Snowflake plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": testDatasource() called for Snowflake plugin.");
return Mono.just(connection)
.flatMap(connectionPool -> {
System.out.println(Thread.currentThread().getName() + ": Testing Snowflake Datasource");
log.debug(Thread.currentThread().getName() + ": Testing Snowflake Datasource");
Connection connectionFromPool;
try {
/**
@ -359,8 +351,7 @@ public class SnowflakePlugin extends BasePlugin {
@Override
public Mono<DatasourceStructure> getStructure(
HikariDataSource connection, DatasourceConfiguration datasourceConfiguration) {
String printMessage = Thread.currentThread().getName() + ": getStructure() called for Snowflake plugin.";
System.out.println(printMessage);
log.debug(Thread.currentThread().getName() + ": getStructure() called for Snowflake plugin.");
final DatasourceStructure structure = new DatasourceStructure();
final Map<String, DatasourceStructure.Table> tablesByName = new LinkedHashMap<>();
final Map<String, DatasourceStructure.Key> keyRegistry = new HashMap<>();
@ -390,7 +381,7 @@ public class SnowflakePlugin extends BasePlugin {
int activeConnections = poolProxy.getActiveConnections();
int totalConnections = poolProxy.getTotalConnections();
int threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(String.format(
log.debug(String.format(
"Before getting snowflake structure Hikari Pool stats active - %d, idle - %d, awaiting - %d, total - %d",
activeConnections, idleConnections, threadsAwaitingConnection, totalConnections));
@ -430,8 +421,7 @@ public class SnowflakePlugin extends BasePlugin {
table.getKeys().sort(Comparator.naturalOrder());
}
} catch (SQLException throwable) {
System.out.println(
"Exception caught while fetching structure of Snowflake datasource. Cause:");
log.debug("Exception caught while fetching structure of Snowflake datasource. Cause:");
throwable.printStackTrace();
throw new AppsmithPluginException(
AppsmithPluginError.PLUGIN_GET_STRUCTURE_ERROR,
@ -439,12 +429,12 @@ public class SnowflakePlugin extends BasePlugin {
throwable.getMessage(),
"SQLSTATE: " + throwable.getSQLState());
} finally {
System.out.println(Thread.currentThread().getName() + ": Get Structure Snowflake");
log.debug(Thread.currentThread().getName() + ": Get Structure Snowflake");
idleConnections = poolProxy.getIdleConnections();
activeConnections = poolProxy.getActiveConnections();
totalConnections = poolProxy.getTotalConnections();
threadsAwaitingConnection = poolProxy.getThreadsAwaitingConnection();
System.out.println(String.format(
log.debug(String.format(
"After snowflake structure, Hikari Pool stats active - %d, idle - %d, awaiting - %d, total - %d",
activeConnections, idleConnections, threadsAwaitingConnection, totalConnections));
@ -453,8 +443,7 @@ public class SnowflakePlugin extends BasePlugin {
// Return the connection back to the pool
connectionFromPool.close();
} catch (SQLException e) {
System.out.println(
"Error returning snowflake connection to pool during get structure");
log.error("Error returning snowflake connection to pool during get structure");
e.printStackTrace();
}
}