Merge branch 'release' of https://github.com/appsmithorg/appsmith into release

This commit is contained in:
Automated Github Action 2020-09-02 12:08:11 +00:00
commit 0f978b487d
20 changed files with 865 additions and 454 deletions

View File

@ -25,9 +25,11 @@ import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@ -43,6 +45,8 @@ public class MySqlPlugin extends BasePlugin {
private static final String PASSWORD = "password";
private static final int VALIDITY_CHECK_TIMEOUT = 5;
private static final String DATE_COLUMN_TYPE_NAME = "date";
public MySqlPlugin(PluginWrapper wrapper) {
super(wrapper);
}
@ -88,11 +92,38 @@ public class MySqlPlugin extends BasePlugin {
ResultSetMetaData metaData = resultSet.getMetaData();
int colCount = metaData.getColumnCount();
while (resultSet.next()) {
Map<String, Object> row = new HashMap<>(colCount);
for (int i = 1; i <= colCount; i++) {
row.put(metaData.getColumnName(i), resultSet.getObject(i));
}
// Use `LinkedHashMap` here so that the column ordering is preserved in the response.
Map<String, Object> row = new LinkedHashMap<>(colCount);
rowsList.add(row);
for (int i = 1; i <= colCount; i++) {
Object value;
final String typeName = metaData.getColumnTypeName(i);
if (resultSet.getObject(i) == null) {
value = null;
} else if (DATE_COLUMN_TYPE_NAME.equalsIgnoreCase(typeName)) {
value = DateTimeFormatter.ISO_DATE.format(resultSet.getDate(i).toLocalDate());
} else if ("datetime".equalsIgnoreCase(typeName) || "timestamp".equalsIgnoreCase(typeName)) {
value = DateTimeFormatter.ISO_DATE_TIME.format(
LocalDateTime.of(
resultSet.getDate(i).toLocalDate(),
resultSet.getTime(i).toLocalTime()
)
) + "Z";
} else if ("year".equalsIgnoreCase(typeName)) {
value = resultSet.getDate(i).toLocalDate().getYear();
} else {
value = resultSet.getObject(i);
}
row.put(metaData.getColumnLabel(i), value);
}
}
} else {

View File

@ -1,6 +1,13 @@
package com.external.plugins;
import com.appsmith.external.models.*;
import com.appsmith.external.models.ActionConfiguration;
import com.appsmith.external.models.ActionExecutionResult;
import com.appsmith.external.models.AuthenticationDTO;
import com.appsmith.external.models.DatasourceConfiguration;
import com.appsmith.external.models.Endpoint;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import lombok.extern.log4j.Log4j;
import org.junit.Before;
import org.junit.ClassRule;
@ -10,21 +17,31 @@ import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.sql.Statement;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import static org.junit.Assert.*;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@Log4j
public class MySqlPluginTest {
MySqlPlugin.MySqlPluginExecutor pluginExecutor = new MySqlPlugin.MySqlPluginExecutor();
@SuppressWarnings("rawtypes") // The type parameter for the container type is just itself and is pseudo-optional.
@ClassRule
public static MySQLContainer mySQLContainer = new MySQLContainer()
.withUsername("mysql").withPassword("password").withDatabaseName("mysql");
.withUsername("mysql")
.withPassword("password")
.withDatabaseName("mysql");
String address;
Integer port;
@ -34,11 +51,67 @@ public class MySqlPluginTest {
@Before
public void setUp() {
if (address != null) {
return;
}
address = mySQLContainer.getContainerIpAddress();
port = mySQLContainer.getFirstMappedPort();
username = mySQLContainer.getUsername();
password = mySQLContainer.getPassword();
createDatasourceConfiguration();
Properties properties = new Properties();
properties.putAll(Map.of(
"user", username,
"password", password
));
try (Connection connection = DriverManager.getConnection(
"jdbc:mysql://" + address + ":" + port + "/" + username,
properties
)) {
try (Statement statement = connection.createStatement()) {
statement.execute("DROP TABLE IF EXISTS users");
}
try (Statement statement = connection.createStatement()) {
statement.execute("CREATE TABLE users (\n" +
" id serial PRIMARY KEY,\n" +
" username VARCHAR (50) UNIQUE NOT NULL,\n" +
" password VARCHAR (50) NOT NULL,\n" +
" email VARCHAR (355) UNIQUE NOT NULL,\n" +
" spouse_dob DATE,\n" +
" dob DATE NOT NULL,\n" +
" yob YEAR NOT NULL,\n" +
" time1 TIME NOT NULL,\n" +
" created_on TIMESTAMP NOT NULL,\n" +
" updated_on DATETIME NOT NULL\n" +
")");
}
try (Statement statement = connection.createStatement()) {
statement.execute(
"INSERT INTO users VALUES (" +
"1, 'Jack', 'jill', 'jack@exemplars.com', NULL, '2018-12-31', 2018," +
" '18:32:45'," +
" '2018-11-30 20:45:15', '2018-11-30 20:45:15'" +
")");
}
try (Statement statement = connection.createStatement()) {
statement.execute(
"INSERT INTO users VALUES (" +
"2, 'Jill', 'jack', 'jill@exemplars.com', NULL, '2019-12-31', 2019," +
" '15:45:30'," +
" '2019-11-30 23:59:59', '2019-11-30 23:59:59'" +
")");
}
} catch (SQLException throwable) {
throwable.printStackTrace();
}
}
private DatasourceConfiguration createDatasourceConfiguration() {
@ -137,7 +210,79 @@ public class MySqlPluginTest {
}
})
.verifyComplete();
}
@Test
public void testAliasColumnNames() {
DatasourceConfiguration dsConfig = createDatasourceConfiguration();
Mono<Object> dsConnectionMono = pluginExecutor.datasourceCreate(dsConfig);
ActionConfiguration actionConfiguration = new ActionConfiguration();
actionConfiguration.setBody("SELECT id as user_id FROM users WHERE id = 1");
Mono<ActionExecutionResult> executeMono = dsConnectionMono
.flatMap(conn -> pluginExecutor.execute(conn, dsConfig, actionConfiguration));
StepVerifier.create(executeMono)
.assertNext(result -> {
final JsonNode node = ((ArrayNode) result.getBody()).get(0);
assertArrayEquals(
new String[]{
"user_id"
},
new ObjectMapper()
.convertValue(node, LinkedHashMap.class)
.keySet()
.toArray()
);
})
.verifyComplete();
}
@Test
public void testExecuteDataTypes() {
DatasourceConfiguration dsConfig = createDatasourceConfiguration();
Mono<Object> dsConnectionMono = pluginExecutor.datasourceCreate(dsConfig);
ActionConfiguration actionConfiguration = new ActionConfiguration();
actionConfiguration.setBody("SELECT * FROM users WHERE id = 1");
Mono<ActionExecutionResult> executeMono = dsConnectionMono
.flatMap(conn -> pluginExecutor.execute(conn, dsConfig, actionConfiguration));
StepVerifier.create(executeMono)
.assertNext(result -> {
assertNotNull(result);
assertTrue(result.getIsExecutionSuccess());
assertNotNull(result.getBody());
final JsonNode node = ((ArrayNode) result.getBody()).get(0);
assertEquals("2018-12-31", node.get("dob").asText());
assertEquals("2018", node.get("yob").asText());
assertTrue(node.get("time1").asText().matches("\\d{2}:\\d{2}:\\d{2}"));
assertTrue(node.get("created_on").asText().matches("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z"));
assertTrue(node.get("updated_on").asText().matches("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z"));
assertArrayEquals(
new String[]{
"id",
"username",
"password",
"email",
"spouse_dob",
"dob",
"yob",
"time1",
"created_on",
"updated_on"
},
new ObjectMapper()
.convertValue(node, LinkedHashMap.class)
.keySet()
.toArray()
);
})
.verifyComplete();
}
}

View File

@ -47,9 +47,10 @@ public class PostgresPlugin extends BasePlugin {
private static final String USER = "user";
private static final String PASSWORD = "password";
private static final String SSL = "ssl";
private static final String DATE_COLUMN_TYPE_NAME = "date";
private static final int VALIDITY_CHECK_TIMEOUT = 5;
private static final String DATE_COLUMN_TYPE_NAME = "date";
public PostgresPlugin(PluginWrapper wrapper) {
super(wrapper);
}

View File

@ -38,6 +38,7 @@ public class PostgresPluginTest {
PostgresPlugin.PostgresPluginExecutor pluginExecutor = new PostgresPlugin.PostgresPluginExecutor();
@SuppressWarnings("rawtypes") // The type parameter for the container type is just itself and is pseudo-optional.
@ClassRule
public static final PostgreSQLContainer pgsqlContainer = new PostgreSQLContainer<>("postgres:alpine")
.withExposedPorts(5432)
@ -79,7 +80,7 @@ public class PostgresPluginTest {
}
try (Statement statement = connection.createStatement()) {
statement.execute("CREATE TABLE users(\n" +
statement.execute("CREATE TABLE users (\n" +
" id serial PRIMARY KEY,\n" +
" username VARCHAR (50) UNIQUE NOT NULL,\n" +
" password VARCHAR (50) NOT NULL,\n" +
@ -114,8 +115,8 @@ public class PostgresPluginTest {
")");
}
} catch (SQLException throwables) {
throwables.printStackTrace();
} catch (SQLException throwable) {
throwable.printStackTrace();
}
}
@ -151,6 +152,33 @@ public class PostgresPluginTest {
.verifyComplete();
}
@Test
public void testAliasColumnNames() {
DatasourceConfiguration dsConfig = createDatasourceConfiguration();
Mono<Object> dsConnectionMono = pluginExecutor.datasourceCreate(dsConfig);
ActionConfiguration actionConfiguration = new ActionConfiguration();
actionConfiguration.setBody("SELECT id as user_id FROM users WHERE id = 1");
Mono<ActionExecutionResult> executeMono = dsConnectionMono
.flatMap(conn -> pluginExecutor.execute(conn, dsConfig, actionConfiguration));
StepVerifier.create(executeMono)
.assertNext(result -> {
final JsonNode node = ((ArrayNode) result.getBody()).get(0);
assertArrayEquals(
new String[]{
"user_id"
},
new ObjectMapper()
.convertValue(node, LinkedHashMap.class)
.keySet()
.toArray()
);
})
.verifyComplete();
}
@Test
public void testExecute() {
DatasourceConfiguration dsConfig = createDatasourceConfiguration();
@ -159,12 +187,11 @@ public class PostgresPluginTest {
ActionConfiguration actionConfiguration = new ActionConfiguration();
actionConfiguration.setBody("SELECT * FROM users WHERE id = 1");
Mono<Object> executeMono = dsConnectionMono.flatMap(conn -> pluginExecutor.execute(conn, dsConfig, actionConfiguration));
Mono<ActionExecutionResult> executeMono = dsConnectionMono
.flatMap(conn -> pluginExecutor.execute(conn, dsConfig, actionConfiguration));
StepVerifier.create(executeMono)
.assertNext(obj -> {
ActionExecutionResult result = (ActionExecutionResult) obj;
.assertNext(result -> {
assertNotNull(result);
assertTrue(result.getIsExecutionSuccess());
assertNotNull(result.getBody());

View File

@ -29,6 +29,7 @@ import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.util.MultiValueMap;
import org.springframework.util.StringUtils;
import reactor.core.Exceptions;
import reactor.core.publisher.Flux;
@ -742,4 +743,10 @@ public class UserServiceImpl extends BaseService<UserRepository, User, String> i
return Mono.just(Boolean.TRUE);
}
@Override
public Flux<User> get(MultiValueMap<String, String> params) {
// Get All Users should not be supported. Return an error
return Flux.error(new AppsmithException(AppsmithError.UNSUPPORTED_OPERATION));
}
}

View File

@ -23,6 +23,9 @@ import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.test.context.support.WithUserDetails;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.CollectionUtils;
import org.springframework.util.LinkedCaseInsensitiveMap;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
@ -372,5 +375,17 @@ public class UserServiceTest {
.verifyComplete();
}
@Test
@WithUserDetails(value = "api_user")
public void getAllUsersTest() {
Flux<User> userFlux = userService.get(CollectionUtils.toMultiValueMap(new LinkedCaseInsensitiveMap<>()));
StepVerifier.create(userFlux)
.expectErrorMatches(throwable -> throwable instanceof AppsmithException &&
throwable.getMessage().equals(AppsmithError.UNSUPPORTED_OPERATION.getMessage()))
.verify();
}
}

View File

@ -42,7 +42,7 @@ Assuming you have node (>=v12), use the following command to run the migration:
```sh
npm install
node main.js 'https://localhost/api/v1/' 'mongodb://localhost:27017/mobtools'
node acl-migration.js 'https://localhost/api/v1/' 'mongodb://localhost:27017/mobtools'
```
The first argument should be a running API endpoint, and the second argument should be a URI to the database that this

View File

@ -0,0 +1,206 @@
if (process.argv.length !== 5) {
console.error("Takes three arguments, the MongoDB URL (like 'mongodb://localhost:27017/mobtools'),\n" +
"\tthe encryption salt and the encryption password used by the server connecting to this DB.");
process.exit(1);
}
const MONGODB_URL = process.argv[2];
const ENCRYPTION_SALT = process.argv[3];
const ENCRYPTION_PASSWORD = process.argv[4];
const { MongoClient, ObjectID } = require("mongodb");
const fs = require("fs");
const path = require("path");
const CryptoJS = require("crypto-js");
const mongoClient = new MongoClient(MONGODB_URL, {
useNewUrlParser: true,
useUnifiedTopology: true,
});
console.time("total time taken");
main()
.then(() => console.log("\nFinished Successfully."))
.catch(error => console.error(error))
.finally(() => {
mongoClient.close();
console.timeEnd("total time taken");
console.log();
});
async function main() {
const con = await mongoClient.connect();
const db = con.db();
const pluginPackageNameByIds = {};
for (const plugin of await db.collection("plugin").find().toArray()) {
pluginPackageNameByIds[plugin._id.toString()] = plugin.packageName;
}
const templateOrganizationId = (await db.collection("config").findOne({name: "template-organization"})).config.organizationId;
const organization = await db.collection("organization").findOne({_id: ObjectID(templateOrganizationId)});
const $datasources = await db.collection("datasource")
.find({organizationId: templateOrganizationId, deleted: false})
.map(datasource => {
const datasourceConfiguration = datasource.datasourceConfiguration;
if (datasourceConfiguration.authentication && datasourceConfiguration.authentication.password) {
datasourceConfiguration.authentication.password = decrypt(datasourceConfiguration.authentication.password);
}
return {
name: datasource.name,
$pluginPackageName: pluginPackageNameByIds[datasource.pluginId],
datasourceConfiguration: datasourceConfiguration,
invalids: datasource.invalids,
deleted: false,
policies: [],
_class: datasource._class,
};
})
.toArray();
const allPageIds = [];
const allDefaultPageIds = new Set();
const $applications = await db.collection("application")
.find({organizationId: templateOrganizationId, deleted: false, isPublic: true})
.map(application => {
allPageIds.push(...application.pages.map(page => ObjectID(page._id)));
allDefaultPageIds.add(application.pages.filter(page => page.isDefault)[0]._id.toString());
return {
name: application.name,
isPublic: true,
$pages: [],
pages: application.pages,
deleted: false,
policies: [],
_class: application._class,
};
})
.toArray();
const actionsByPageId = {};
for (const action of await db.collection("action").find({organizationId: templateOrganizationId, deleted: false}).toArray()) {
if (!actionsByPageId[action.pageId]) {
actionsByPageId[action.pageId] = [];
}
let $isEmbedded = typeof action.datasource._id === "undefined";
actionsByPageId[action.pageId].push({
name: action.name,
datasource: {
$isEmbedded,
name: action.datasource.name,
$pluginPackageName: pluginPackageNameByIds[action.datasource.pluginId],
datasourceConfiguration: action.datasource.datasourceConfiguration,
invalids: action.datasource.invalids,
deleted: false,
policies: [],
},
actionConfiguration: action.actionConfiguration,
pluginType: action.pluginType,
executeOnLoad: action.executeOnLoad,
dynamicBindingPathList: action.dynamicBindingPathList,
isValid: action.isValid,
invalids: action.invalids,
jsonPathKeys: action.jsonPathKeys,
deleted: false,
policies: [],
_class: action._class,
});
}
const pagesById = {};
for (const page of await db.collection("page").find({_id: {$in: allPageIds}}).toArray()) {
const pageId = page._id.toString();
for (const layout of page.layouts) {
delete layout._id;
for (const actionSet of layout.layoutOnLoadActions) {
for (const action of actionSet) {
delete action._id;
}
}
for (const actionSet of layout.publishedLayoutOnLoadActions) {
for (const action of actionSet) {
delete action._id;
}
}
}
pagesById[pageId] = {
name: page.name,
$isDefault: allDefaultPageIds.has(pageId),
$actions: actionsByPageId[pageId],
layouts: page.layouts,
deleted: false,
policies: [],
_class: page._class,
};
}
for (const application of $applications) {
application.$pages = [];
for (const page of application.pages) {
application.$pages.push(pagesById[page._id]);
}
delete application.pages;
}
const finalData = {
name: organization.name,
organizationSettings: organization.organizationSettings,
slug: organization.slug,
userRoles: [],
deleted: false,
policies: [],
_class: organization._class,
$datasources,
$applications,
};
if (finalData.slug !== "example-apps") {
console.warn("The slug of the organization in the generated dump is not `example-apps`. This might be significant.");
}
fs.writeFileSync(
findExamplesJsonPath(),
JSON.stringify(finalData, null, 2)
);
}
function findExamplesJsonPath() {
let projectDir = __dirname;
while (projectDir != null && !fs.existsSync(path.join(projectDir, "appsmith-server"))) {
projectDir = path.dirname(projectDir);
}
return path.join(projectDir, "appsmith-server", "src", "main", "resources", "examples-organization.json");
}
/*!
* Author: flohall
* date: 2019-11-05
* file: module/textEncryptor.js
* Original: <https://stackoverflow.com/a/58720652/151048>.
*/
const key = CryptoJS.PBKDF2(ENCRYPTION_PASSWORD, ENCRYPTION_SALT, {
keySize: 256 / 32,
iterations: 1024
});
const decryptConfig = {
// same as NULL_IV_GENERATOR of AesBytesEncryptor - so encryption creates always same cipher text for same input
iv: {words: [0, 0, 0, 0, 0, 0, 0, 0], sigBytes: 0},
padding: CryptoJS.pad.Pkcs7,
mode: CryptoJS.mode.CBC
};
function decrypt(text) {
return CryptoJS.AES
.decrypt({ciphertext: CryptoJS.enc.Hex.parse(text)}, key, decryptConfig)
.toString(CryptoJS.enc.Utf8);
}

View File

@ -45,6 +45,12 @@
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=",
"dev": true
},
"crypto-js": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-4.0.0.tgz",
"integrity": "sha512-bzHZN8Pn+gS7DQA6n+iUmBfl0hO5DJq++QP3U6uTucDtk/0iGpXd/Gg7CGR0p8tJhofJyaKoWBuJI4eAO00BBg==",
"dev": true
},
"debug": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",

View File

@ -3,15 +3,16 @@
"version": "1.0.0",
"description": "",
"private": true,
"main": "main.js",
"scripts": {
"run": "node main.js"
"acl-migration": "node acl-migration.js",
"dump-examples-org": "node dump-examples-org.js"
},
"keywords": [],
"author": "Appsmith",
"devDependencies": {
"axios": "^0.19.2",
"axios-cookiejar-support": "^1.0.0",
"crypto-js": "^4.0.0",
"mongodb": "^3.5.8",
"tough-cookie": "^4.0.0"
}

View File

@ -3,46 +3,68 @@
set -o errexit
is_command_present() {
type "$1" >/dev/null 2>&1
type "$1" >/dev/null 2>&1
}
is_mac() {
[[ $OSTYPE == darwin* ]]
}
# This function checks if the relevant ports required by Appsmith are available or not
# The script should error out in case they aren't available
check_ports_occupied() {
ports_occupied="$(
if [[ "$OSTYPE" == "darwin"* ]]; then
sudo netstat -anp tcp
else
sudo netstat -tupln tcp
fi | awk '$6 == "LISTEN" && $4 ~ /^.*[.:](80|443)$/' | wc -l | grep -o '[[:digit:]]\+'
)"
local port_check_output
local ports_pattern="80|443"
if is_mac; then
port_check_output="$(netstat -anp tcp | awk '$6 == "LISTEN" && $4 ~ /^.*\.('"$ports_pattern"')$/')"
elif is_command_present ss; then
# The `ss` command seems to be a better/faster version of `netstat`, but is not available on all Linux
# distributions by default. Other distributions have `ss` but no `netstat`. So, we try for `ss` first, then
# fallback to `netstat`.
port_check_output="$(ss --all --numeric --tcp | awk '$1 == "LISTEN" && $4 ~ /^.*:('"$ports_pattern"')$/')"
elif is_command_present netstat; then
port_check_output="$(netstat --all --numeric --tcp | awk '$6 == "LISTEN" && $4 ~ /^.*:('"$ports_pattern"')$/')"
fi
if [[ -n $port_check_output ]]; then
echo "+++++++++++ ERROR ++++++++++++++++++++++"
echo "Appsmith requires ports 80 & 443 to be open. Please shut down any other service(s) that may be running on these ports."
echo "++++++++++++++++++++++++++++++++++++++++"
echo ""
bye
fi
}
install_docker() {
if [[ $package_manager -eq apt-get ]];then
echo "++++++++++++++++++++++++"
echo "Setting up docker repos"
sudo $package_manager update --quiet
echo "++++++++++++++++++++++++"
echo "Setting up docker repos"
sudo apt-get -y --quiet install gnupg-agent
if [[ $package_manager == apt-get ]]; then
apt_cmd="sudo apt-get --yes --quiet"
$apt_cmd update
$apt_cmd install gnupg-agent
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
"deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
$apt_cmd update
echo "Installing docker"
$apt_cmd install docker-ce docker-ce-cli containerd.io
else
sudo yum install -y yum-utils
yum_cmd="sudo yum --assumeyes --quiet"
$yum_cmd install yum-utils
sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo
echo "Installing docker"
$yum_cmd install docker-ce docker-ce-cli containerd.io
fi
sudo ${package_manager} -y update --quiet
echo "Installing docker"
sudo ${package_manager} -y install docker-ce docker-ce-cli containerd.io --quiet
}
install_docker_compose() {
if [ $package_manager == "apt-get" -o $package_manager == "yum" ];then
if [ ! -f /usr/bin/docker-compose ];then
if [[ $package_manager == "apt-get" || $package_manager == "yum" ]]; then
if [[ ! -f /usr/bin/docker-compose ]];then
echo "Installing docker-compose..."
sudo curl -L "https://github.com/docker/compose/releases/download/1.26.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
@ -67,29 +89,29 @@ start_docker() {
}
check_os() {
if [[ "$OSTYPE" == "darwin"* ]]; then
if is_mac; then
package_manager="brew"
desired_os=1
return
fi
os_name=`cat /etc/*-release | egrep "^NAME="`
os_name="${os_name#*=}"
os_name="$(cat /etc/*-release | awk -F= '$1 == "NAME" { gsub(/"/, ""); print $2; exit }')"
case "${os_name}" in
\"Ubuntu*\")
case "$os_name" in
Ubuntu*)
desired_os=1
package_manager="apt-get"
;;
\"Red\ Hat*\")
desired_os=0
Red\ Hat*)
desired_os=1
package_manager="yum"
;;
\"CentOS*\")
desired_os=0
CentOS*)
desired_os=1
package_manager="yum"
;;
*) desired_os=0
*)
desired_os=0
esac
}
@ -98,16 +120,11 @@ overwrite_file() {
local template_file="$2"
local full_path="$install_dir/$relative_path"
if [[ -f $full_path ]]; then
read -p "File $relative_path already exists. Would you like to replace it? [Y]: " value
value=${value:-Y}
if ! [[ $value == "Y" || $value == "y" || $value == "yes" || $value == "Yes" ]]; then
echo "You chose not to replace existing file: '$full_path'."
rm -f "$template_file"
echo "File $template_file removed from source directory."
echo ""
fi
if [[ -f $full_path ]] && ! confirm y "File $relative_path already exists. Would you like to replace it?"; then
echo "You chose NOT to replace existing file: '$full_path'."
rm -f "$template_file"
echo "File $template_file removed from source directory."
echo ""
else
mv -f "$template_file" "$full_path"
echo "File $full_path moved successfully!"
@ -116,30 +133,28 @@ overwrite_file() {
# This function prompts the user for an input for a non-empty Mongo root password.
read_mongo_password() {
read -sp 'Set the mongo password: ' mongo_root_password
while [[ -z $mongo_root_password ]]
do
read -srp 'Set the mongo password: ' mongo_root_password
while [[ -z $mongo_root_password ]]; do
echo ""
echo ""
echo "+++++++++++ ERROR ++++++++++++++++++++++"
echo "The mongo password cannot be empty. Please input a valid password string."
echo "++++++++++++++++++++++++++++++++++++++++"
echo ""
read -sp 'Set the mongo password: ' mongo_root_password
read -srp 'Set the mongo password: ' mongo_root_password
done
}
# This function prompts the user for an input for a non-empty Mongo username.
read_mongo_username() {
read -p 'Set the mongo root user: ' mongo_root_user
while [[ -z $mongo_root_user ]]
do
read -rp 'Set the mongo root user: ' mongo_root_user
while [[ -z $mongo_root_user ]]; do
echo ""
echo "+++++++++++ ERROR ++++++++++++++++++++++"
echo "The mongo username cannot be empty. Please input a valid username string."
echo "++++++++++++++++++++++++++++++++++++++++"
echo ""
read -p 'Set the mongo root user: ' mongo_root_user
read -rp 'Set the mongo root user: ' mongo_root_user
done
}
@ -163,7 +178,7 @@ wait_for_containers_start() {
urlencode() {
# urlencode <string>
old_lc_collate=$LC_COLLATE
local old_lc_collate="$LC_COLLATE"
LC_COLLATE=C
local length="${#1}"
@ -175,13 +190,44 @@ urlencode() {
esac
done
LC_COLLATE=$old_lc_collate
LC_COLLATE="$old_lc_collate"
}
generate_password() {
# Picked up the following method of generation from : https://gist.github.com/earthgecko/3089509
LC_CTYPE=C tr -dc 'a-zA-Z0-9' < /dev/urandom | fold -w 13 | head -n 1
}
confirm() {
local default="$1" # Should be `y` or `n`.
local prompt="$2"
local options="y/N"
if [[ $default == y || $default == Y ]]; then
options="Y/n"
fi
local answer
read -n1 -rp "$prompt [$options] " answer
if [[ -z $answer ]]; then
# No answer given, the user just hit the Enter key. Take the default value as the answer.
answer="$default"
else
# An answer was given. This means the user didn't get to hit Enter so the cursor on the same line. Do an empty
# echo so the cursor moves to a new line.
echo
fi
[[ yY =~ $answer ]]
}
echo_contact_support() {
echo "Please contact <support@appsmith.com> with your OS details and version${1:-.}"
}
bye() { # Prints a friendly good bye message and exits the script.
echo ""
echo -e "Exiting for now. Bye! \U1F44B"
exit
echo -e "\nExiting for now. Bye! \U1F44B\n"
exit 1
}
echo -e "\U1F44B Thank you for trying out Appsmith! "
@ -196,32 +242,38 @@ check_os
if [[ $desired_os -eq 0 ]];then
echo ""
echo "This script is currently meant to install Appsmith on Mac OS X | Ubuntu | RHEL | CentOS machines."
echo "Please contact support@appsmith.com with your OS details if you wish to extend this support"
echo_contact_support " if you wish to extend this support."
bye
else
echo "You're on an OS that is supported by this installation script."
echo ""
fi
if [[ "$OSTYPE" == "darwin"* && "$EUID" -eq 0 ]]; then
echo "Please do not run this script with root permissions on macOS."
echo "Please contact support@appsmith.com with your OS details if you wish to extend this support"
if [[ $EUID -eq 0 ]]; then
echo "Please do not run this script as root/sudo."
echo_contact_support
bye
fi
check_ports_occupied
if [[ $ports_occupied -ne 0 ]]; then
echo "+++++++++++ ERROR ++++++++++++++++++++++"
echo "Appsmith requires ports 80 & 443 to be open. Please shut down any other service(s) that may be running on these ports."
echo "++++++++++++++++++++++++++++++++++++++++"
echo ""
bye
read -rp 'Installation Directory [appsmith]: ' install_dir
install_dir="${install_dir:-appsmith}"
if [[ $install_dir != /* ]]; then
# If it's not an absolute path, prepend current working directory to it, to make it an absolute path.
install_dir="$PWD/$install_dir"
fi
if [[ -e "$install_dir" ]]; then
echo "The path '$install_dir' is already present. Please run the script again with a different path to install new."
echo "If you're trying to update your existing installation, that happens automatically through WatchTower."
echo_contact_support " if you're facing problems with the auto-updates."
exit
fi
# Check is Docker daemon is installed and available. If not, the install & start Docker for Linux machines. We cannot automatically install Docker Desktop on Mac OS
if ! is_command_present docker ;then
if [ $package_manager == "apt-get" -o $package_manager == "yum" ];then
if ! is_command_present docker; then
if [[ $package_manager == "apt-get" || $package_manager == "yum" ]]; then
install_docker
else
echo ""
@ -229,7 +281,7 @@ if ! is_command_present docker ;then
echo "Docker Desktop must be installed manually on Mac OS to proceed. Docker can only be installed automatically on Ubuntu / Redhat / Cent OS"
echo "https://docs.docker.com/docker-for-mac/install/"
echo "++++++++++++++++++++++++++++++++++++++++++++++++"
exit
exit 1
fi
fi
@ -243,46 +295,40 @@ if [[ $package_manager == "yum" || $package_manager == "apt-get" ]]; then
start_docker
fi
read -p 'Installation Directory [appsmith]: ' install_dir
install_dir="${install_dir:-appsmith}"
mkdir -p "$PWD/$install_dir"
install_dir="$PWD/$install_dir"
read -p 'Is this a fresh installation? [Y/n]' fresh_install
fresh_install="${fresh_install:-Y}"
echo "Installing Appsmith to '$install_dir'."
mkdir -p "$install_dir"
echo ""
if [ $fresh_install == "N" -o $fresh_install == "n" -o $fresh_install == "no" -o $fresh_install == "No" ];then
read -p 'Enter your current mongo db host: ' mongo_host
read -p 'Enter your current mongo root user: ' mongo_root_user
read -sp 'Enter your current mongo password: ' mongo_root_password
read -p 'Enter your current mongo database name: ' mongo_database
# It is possible that this isn't the first installation.
echo ""
read -p 'Do you have any existing data in the database?[Y/n]: ' existing_encrypted_data
existing_encrypted_data=${existing_encrypted_data:-Y}
# In this case be more cautious of auto generating the encryption keys. Err on the side of not generating the encryption keys
if [ $existing_encrypted_data == "N" -o $existing_encrypted_data == "n" -o $existing_encrypted_data == "no" -o $existing_encrypted_data == "No" ];then
auto_generate_encryption="true"
else
auto_generate_encryption="false"
fi
elif [ $fresh_install == "Y" -o $fresh_install == "y" -o $fresh_install == "yes" -o $fresh_install == "Yes" ];then
echo "Appsmith needs to create a mongo db"
if confirm y "Is this a fresh installation?"; then
echo "Appsmith needs to create a MongoDB instance."
mongo_host="mongo"
mongo_database="appsmith"
# We invoke functions to read the mongo credentials from the user because they MUST be non-empty
read_mongo_username
read_mongo_password
# Since the mongo was automatically setup, this must be the first time installation. Generate encryption credentials for this scenario
auto_generate_encryption="true"
else
read -rp 'Enter your current mongo db host: ' mongo_host
read -rp 'Enter your current mongo root user: ' mongo_root_user
read -srp 'Enter your current mongo password: ' mongo_root_password
read -rp 'Enter your current mongo database name: ' mongo_database
# It is possible that this isn't the first installation.
echo ""
# In this case be more cautious of auto generating the encryption keys. Err on the side of not generating the encryption keys
if confirm y "Do you have any existing data in the database?"; then
auto_generate_encryption="false"
else
auto_generate_encryption="true"
fi
fi
echo ""
# urlencoding the Mongo username and password
encoded_mongo_root_user=$( urlencode $mongo_root_user )
encoded_mongo_root_password=$( urlencode $mongo_root_password )
encoded_mongo_root_user=$(urlencode "$mongo_root_user")
encoded_mongo_root_password=$(urlencode "$mongo_root_password")
encryptionEnv=./template/encryption.env
if test -f "$encryptionEnv"; then
@ -290,14 +336,14 @@ if test -f "$encryptionEnv"; then
echo "1) No. Conserve the older encryption password and salt and continue"
echo "2) Yes. Overwrite the existing encryption (NOT SUGGESTED) with autogenerated encryption password and salt"
echo "3) Yes. Overwrite the existing encryption (NOT SUGGESTED) with manually entering the encryption password and salt"
read -p 'Enter option number [1]: ' overwrite_encryption
read -rp 'Enter option number [1]: ' overwrite_encryption
overwrite_encryption=${overwrite_encryption:-1}
auto_generate_encryption="false"
if [[ $overwrite_encryption -eq 1 ]];then
setup_encryption="false"
elif [[ $overwrite_encryption -eq 2 ]];then
setup_encryption="true"
auto_generate_encryption="true"
auto_generate_encryption="true"
elif [[ $overwrite_encryption -eq 3 ]];then
setup_encryption="true"
auto_generate_encryption="false"
@ -309,22 +355,17 @@ fi
if [[ "$setup_encryption" = "true" ]];then
if [[ "$auto_generate_encryption" = "false" ]];then
echo "Please enter the salt and password found in the encyption.env file of your previous appsmith installation "
read -p 'Enter your encryption password: ' user_encryption_password
read -p 'Enter your encryption salt: ' user_encryption_salt
elif [[ "$auto_generate_encryption" = "true" ]];then
# Picked up the following method of generation from: https://gist.github.com/earthgecko/3089509
user_encryption_password=$(cat /dev/urandom | LC_CTYPE=C tr -dc 'a-zA-Z0-9' | fold -w 13 | head -n 1)
user_encryption_salt=$(cat /dev/urandom | LC_CTYPE=C tr -dc 'a-zA-Z0-9' | fold -w 13 | head -n 1)
read -rp 'Enter your encryption password: ' user_encryption_password
read -rp 'Enter your encryption salt: ' user_encryption_salt
elif [[ "$auto_generate_encryption" = "true" ]]; then
user_encryption_password=$(generate_password)
user_encryption_salt=$(generate_password)
fi
fi
echo ""
read -p 'Do you have a custom domain that you would like to link? (Only for cloud installations) [N/y]: ' setup_domain
setup_domain=${setup_domain:-N}
# Setting default value for the setup_ssl variable. Without this, the script errors out in the if condition later
setup_ssl='N'
if [ $setup_domain == "Y" -o $setup_domain == "y" -o $setup_domain == "yes" -o $setup_domain == "Yes" ];then
if confirm n "Do you have a custom domain that you would like to link? (Only for cloud installations)"; then
echo ""
echo "+++++++++++ IMPORTANT PLEASE READ ++++++++++++++++++++++"
echo "Please update your DNS records with your domain registrar"
@ -333,12 +374,9 @@ if [ $setup_domain == "Y" -o $setup_domain == "y" -o $setup_domain == "yes" -o $
echo "+++++++++++++++++++++++++++++++++++++++++++++++"
echo ""
echo "Would you like to provision an SSL certificate for your custom domain / subdomain?"
read -p '(Your DNS records must be updated for us to proceed) [Y/n]: ' setup_ssl
setup_ssl=${setup_ssl:-Y}
fi
if [ $setup_ssl == "Y" -o $setup_ssl == "y" -o $setup_ssl == "yes" -o $setup_ssl == "Yes" ]; then
read -p 'Enter the domain or subdomain on which you want to host appsmith (example.com / app.example.com): ' custom_domain
if confirm y '(Your DNS records must be updated for us to proceed)'; then
read -rp 'Enter the domain or subdomain on which you want to host appsmith (example.com / app.example.com): ' custom_domain
fi
fi
NGINX_SSL_CMNT=""
@ -348,33 +386,34 @@ fi
echo ""
echo "Downloading the configuration templates..."
mkdir -p template
( cd template
curl --remote-name-all --silent --show-error \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/docker-compose.yml.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/init-letsencrypt.sh.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/mongo-init.js.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/docker.env.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/nginx_app.conf.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/encryption.env.sh
templates_dir="$(mktemp -d)"
mkdir -p "$templates_dir"
(
cd "$templates_dir"
curl --remote-name-all --silent --show-error \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/docker-compose.yml.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/init-letsencrypt.sh.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/mongo-init.js.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/docker.env.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/nginx_app.conf.sh \
https://raw.githubusercontent.com/appsmithorg/appsmith/release/deploy/template/encryption.env.sh
)
# Create one data sub-directory per service role.
for directory_name in nginx certbot/conf certbot/www mongo/db
do
mkdir -p "$install_dir/data/$directory_name"
done
# Create needed folder structure.
mkdir -p "$install_dir/data/"{nginx,certbot/{conf,www},mongo/db}
echo ""
echo "Generating the configuration files from the templates"
. ./template/nginx_app.conf.sh
. ./template/docker-compose.yml.sh
. ./template/mongo-init.js.sh
. ./template/init-letsencrypt.sh.sh
. ./template/docker.env.sh
if [[ "$setup_encryption" = "true" ]];then
. ./template/encryption.env.sh
bash "$templates_dir/nginx_app.conf.sh" "$NGINX_SSL_CMNT" "$custom_domain" > nginx_app.conf
bash "$templates_dir/docker-compose.yml.sh" "$mongo_root_user" "$mongo_root_password" "$mongo_database" > docker-compose.yml
bash "$templates_dir/mongo-init.js.sh" "$mongo_root_user" "$mongo_root_password" > mongo-init.js
bash "$templates_dir"/init-letsencrypt.sh.sh "$custom_domain" > init-letsencrypt.sh
bash "$templates_dir/docker.env.sh" "$encoded_mongo_root_user" "$encoded_mongo_root_password" "$mongo_host" > docker.env
if [[ "$setup_encryption" = "true" ]]; then
bash "$templates_dir/encryption.env.sh" "$user_encryption_password" "$user_encryption_salt" > encryption.env
fi
rm -rf "$templates_dir"
chmod 0755 init-letsencrypt.sh
overwrite_file "data/nginx/app.conf.template" "nginx_app.conf"
@ -430,5 +469,4 @@ else
echo "Join our Discord server https://discord.com/invite/rBTTVJp"
fi
echo ""
echo -e "Peace out \U1F596\n"
echo -e "\nPeace out \U1F596\n"

View File

@ -1,10 +1,12 @@
#!/bin/sh
#!/bin/bash
if [ ! -f docker-compose.yml ]; then
touch docker-compose.yml
fi
set -o nounset
cat >| docker-compose.yml << EOF
mongo_root_user="$1"
mongo_root_password="$2"
mongo_database="$3"
cat <<EOF
version: "3.7"
services:
@ -21,6 +23,8 @@ services:
command: "/bin/sh -c 'while :; do sleep 6h & wait \$\${!}; nginx -s reload; done & /start-nginx.sh'"
depends_on:
- appsmith-internal-server
labels:
com.centurylinklabs.watchtower.enable: "true"
networks:
- appsmith
@ -44,6 +48,8 @@ services:
- mongo
depends_on:
- mongo
labels:
com.centurylinklabs.watchtower.enable: "true"
networks:
- appsmith
@ -52,7 +58,7 @@ services:
expose:
- "27017"
environment:
- MONGO_INITDB_DATABASE=appsmith
- MONGO_INITDB_DATABASE=$mongo_database
- MONGO_INITDB_ROOT_USERNAME=$mongo_root_user
- MONGO_INITDB_ROOT_PASSWORD=$mongo_root_password
volumes:
@ -68,8 +74,16 @@ services:
networks:
- appsmith
watchtower:
image: containrrr/watchtower
volumes:
- /var/run/docker.sock:/var/run/docker.sock
# Update check interval in seconds.
command: --interval 300 --label-enable
networks:
- appsmith
networks:
appsmith:
driver: bridge
EOF

View File

@ -1,10 +1,12 @@
#!/bin/sh
#!/bin/bash
if [ ! -f docker-compose.yml ]; then
touch docker-compose.yml
fi
set -o nounset
cat >| docker.env << EOF
encoded_mongo_root_user="$1"
encoded_mongo_root_password="$2"
mongo_host="$3"
cat << EOF
# Read our documentation on how to configure these features
# https://docs.appsmith.com/v/v1.1/enabling-3p-services

View File

@ -1,11 +1,11 @@
#!/bin/sh
#!/bin/bash
if [ ! -f encryption.env ]; then
touch encryption.env
fi
set -o nounset
cat >| encryption.env << EOF
user_encryption_password="$1"
user_encryption_salt="$2"
cat <<EOF
APPSMITH_ENCRYPTION_PASSWORD=$user_encryption_password
APPSMITH_ENCRYPTION_SALT=$user_encryption_salt
EOF

View File

@ -1,12 +1,10 @@
#!/bin/sh
#!/bin/bash
if [ ! -f init-letsencrypt.sh ]; then
touch init-letsencrypt.sh
fi
set -o nounset
custom_domain="$1"
cat >| init-letsencrypt.sh << EOF
cat << EOF
#!/bin/bash
if ! [ -x "\$(command -v docker-compose)" ]; then

View File

@ -1,12 +1,11 @@
#!/bin/sh
#!/bin/bash
if [ ! -f mongo-init.js ]; then
touch mongo-init.js
fi
set -o nounset
mongo_root_user="$1"
mongo_root_password="$2"
cat >| mongo-init.js << EOF
cat << EOF
let error = false
print("**** Going to start Mongo seed ****")
@ -1534,7 +1533,4 @@ printjson(res)
if (error) {
print('Error occurred while inserting the records')
}
EOF

View File

@ -1,15 +1,16 @@
#!/bin/sh
#!/bin/bash
if [ ! -f nginx_app.conf ]; then
touch nginx_app.conf
fi
set -o nounset
# This template file is different from the others because of the sub_filter commands in the Nginx configuration
# Those variables are substituted inside the Docker container for appsmith-editor during bootup.
# Hence we wish to prevent environment substitution here.
# Relevant variables will be replaced at the end of this file via sed command
# In the config file, there are three kinds of variables, all represented with the syntax `$name`. The ones that are
# not escaped with a backslash are rendered within this script. Among the ones that are escaped with a backslash, the
# ones starting with `APPSMITH_` will be rendered at boot-up time by the appsmith-editor Docker container. The rest
# (like $scheme and $host) are for nginx to work out.
content='
NGINX_SSL_CMNT="$1"
custom_domain="$2"
cat <<EOF
server {
listen 80;
$NGINX_SSL_CMNT server_name $custom_domain ;
@ -24,29 +25,29 @@ $NGINX_SSL_CMNT server_name $custom_domain ;
root /var/www/certbot;
}
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Proto \$scheme;
proxy_set_header X-Forwarded-Host \$host;
location / {
try_files $uri /index.html =404;
try_files \$uri /index.html =404;
sub_filter __APPSMITH_SENTRY_DSN__ '\''${APPSMITH_SENTRY_DSN}'\'';
sub_filter __APPSMITH_SMART_LOOK_ID__ '\''${APPSMITH_SMART_LOOK_ID}'\'';
sub_filter __APPSMITH_OAUTH2_GOOGLE_CLIENT_ID__ '\''${APPSMITH_OAUTH2_GOOGLE_CLIENT_ID}'\'';
sub_filter __APPSMITH_OAUTH2_GITHUB_CLIENT_ID__ '\''${APPSMITH_OAUTH2_GITHUB_CLIENT_ID}'\'';
sub_filter __APPSMITH_MARKETPLACE_ENABLED__ '\''${APPSMITH_MARKETPLACE_ENABLED}'\'';
sub_filter __APPSMITH_SEGMENT_KEY__ '\''${APPSMITH_SEGMENT_KEY}'\'';
sub_filter __APPSMITH_OPTIMIZELY_KEY__ '\''${APPSMITH_OPTIMIZELY_KEY}'\'';
sub_filter __APPSMITH_ALGOLIA_API_ID__ '\''${APPSMITH_ALGOLIA_API_ID}'\'';
sub_filter __APPSMITH_ALGOLIA_SEARCH_INDEX_NAME__ '\''${APPSMITH_ALGOLIA_SEARCH_INDEX_NAME}'\'';
sub_filter __APPSMITH_ALGOLIA_API_KEY__ '\''${APPSMITH_ALGOLIA_API_KEY}'\'';
sub_filter __APPSMITH_CLIENT_LOG_LEVEL__ '\''${APPSMITH_CLIENT_LOG_LEVEL}'\'';
sub_filter __APPSMITH_GOOGLE_MAPS_API_KEY__ '\''${APPSMITH_GOOGLE_MAPS_API_KEY}'\'';
sub_filter __APPSMITH_TNC_PP__ '\''${APPSMITH_TNC_PP}'\'';
sub_filter __APPSMITH_VERSION_ID__ '\''${APPSMITH_VERSION_ID}'\'';
sub_filter __APPSMITH_VERSION_RELEASE_DATE__ '\''${APPSMITH_VERSION_RELEASE_DATE}'\'';
sub_filter __APPSMITH_INTERCOM_APP_ID__ '\''${APPSMITH_INTERCOM_APP_ID}'\'';
sub_filter __APPSMITH_MAIL_ENABLED__ '\''${APPSMITH_MAIL_ENABLED}'\'';
sub_filter __APPSMITH_SENTRY_DSN__ '\${APPSMITH_SENTRY_DSN}';
sub_filter __APPSMITH_SMART_LOOK_ID__ '\${APPSMITH_SMART_LOOK_ID}';
sub_filter __APPSMITH_OAUTH2_GOOGLE_CLIENT_ID__ '\${APPSMITH_OAUTH2_GOOGLE_CLIENT_ID}';
sub_filter __APPSMITH_OAUTH2_GITHUB_CLIENT_ID__ '\${APPSMITH_OAUTH2_GITHUB_CLIENT_ID}';
sub_filter __APPSMITH_MARKETPLACE_ENABLED__ '\${APPSMITH_MARKETPLACE_ENABLED}';
sub_filter __APPSMITH_SEGMENT_KEY__ '\${APPSMITH_SEGMENT_KEY}';
sub_filter __APPSMITH_OPTIMIZELY_KEY__ '\${APPSMITH_OPTIMIZELY_KEY}';
sub_filter __APPSMITH_ALGOLIA_API_ID__ '\${APPSMITH_ALGOLIA_API_ID}';
sub_filter __APPSMITH_ALGOLIA_SEARCH_INDEX_NAME__ '\${APPSMITH_ALGOLIA_SEARCH_INDEX_NAME}';
sub_filter __APPSMITH_ALGOLIA_API_KEY__ '\${APPSMITH_ALGOLIA_API_KEY}';
sub_filter __APPSMITH_CLIENT_LOG_LEVEL__ '\${APPSMITH_CLIENT_LOG_LEVEL}';
sub_filter __APPSMITH_GOOGLE_MAPS_API_KEY__ '\${APPSMITH_GOOGLE_MAPS_API_KEY}';
sub_filter __APPSMITH_TNC_PP__ '\${APPSMITH_TNC_PP}';
sub_filter __APPSMITH_VERSION_ID__ '\${APPSMITH_VERSION_ID}';
sub_filter __APPSMITH_VERSION_RELEASE_DATE__ '\${APPSMITH_VERSION_RELEASE_DATE}';
sub_filter __APPSMITH_INTERCOM_APP_ID__ '\${APPSMITH_INTERCOM_APP_ID}';
sub_filter __APPSMITH_MAIL_ENABLED__ '\${APPSMITH_MAIL_ENABLED}';
}
location /f {
@ -76,32 +77,32 @@ $NGINX_SSL_CMNT
$NGINX_SSL_CMNT include /etc/letsencrypt/options-ssl-nginx.conf;
$NGINX_SSL_CMNT ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
$NGINX_SSL_CMNT
$NGINX_SSL_CMNT proxy_set_header X-Forwarded-Proto $scheme;
$NGINX_SSL_CMNT proxy_set_header X-Forwarded-Host $host;
$NGINX_SSL_CMNT proxy_set_header X-Forwarded-Proto \$scheme;
$NGINX_SSL_CMNT proxy_set_header X-Forwarded-Host \$host;
$NGINX_SSL_CMNT
$NGINX_SSL_CMNT root /var/www/appsmith;
$NGINX_SSL_CMNT index index.html index.htm;
$NGINX_SSL_CMNT
$NGINX_SSL_CMNT location / {
$NGINX_SSL_CMNT try_files $uri /index.html =404;
$NGINX_SSL_CMNT try_files \$uri /index.html =404;
$NGINX_SSL_CMNT
$NGINX_SSL_CMNT sub_filter __APPSMITH_SENTRY_DSN__ '\''${APPSMITH_SENTRY_DSN}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_SMART_LOOK_ID__ '\''${APPSMITH_SMART_LOOK_ID}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_OAUTH2_GOOGLE_CLIENT_ID__ '\''${APPSMITH_OAUTH2_GOOGLE_CLIENT_ID}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_OAUTH2_GITHUB_CLIENT_ID__ '\''${APPSMITH_OAUTH2_GITHUB_CLIENT_ID}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_MARKETPLACE_ENABLED__ '\''${APPSMITH_MARKETPLACE_ENABLED}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_SEGMENT_KEY__ '\''${APPSMITH_SEGMENT_KEY}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_OPTIMIZELY_KEY__ '\''${APPSMITH_OPTIMIZELY_KEY}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_ALGOLIA_API_ID__ '\''${APPSMITH_ALGOLIA_API_ID}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_ALGOLIA_SEARCH_INDEX_NAME__ '\''${APPSMITH_ALGOLIA_SEARCH_INDEX_NAME}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_ALGOLIA_API_KEY__ '\''${APPSMITH_ALGOLIA_API_KEY}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_CLIENT_LOG_LEVEL__ '\''${APPSMITH_CLIENT_LOG_LEVEL}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_GOOGLE_MAPS_API_KEY__ '\''${APPSMITH_GOOGLE_MAPS_API_KEY}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_TNC_PP__ '\''${APPSMITH_TNC_PP}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_VERSION_ID__ '\''${APPSMITH_VERSION_ID}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_VERSION_RELEASE_DATE__ '\''${APPSMITH_VERSION_RELEASE_DATE}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_INTERCOM_APP_ID__ '\''${APPSMITH_INTERCOM_APP_ID}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_MAIL_ENABLED__ '\''${APPSMITH_MAIL_ENABLED}'\'';
$NGINX_SSL_CMNT sub_filter __APPSMITH_SENTRY_DSN__ '\${APPSMITH_SENTRY_DSN}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_SMART_LOOK_ID__ '\${APPSMITH_SMART_LOOK_ID}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_OAUTH2_GOOGLE_CLIENT_ID__ '\${APPSMITH_OAUTH2_GOOGLE_CLIENT_ID}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_OAUTH2_GITHUB_CLIENT_ID__ '\${APPSMITH_OAUTH2_GITHUB_CLIENT_ID}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_MARKETPLACE_ENABLED__ '\${APPSMITH_MARKETPLACE_ENABLED}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_SEGMENT_KEY__ '\${APPSMITH_SEGMENT_KEY}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_OPTIMIZELY_KEY__ '\${APPSMITH_OPTIMIZELY_KEY}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_ALGOLIA_API_ID__ '\${APPSMITH_ALGOLIA_API_ID}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_ALGOLIA_SEARCH_INDEX_NAME__ '\${APPSMITH_ALGOLIA_SEARCH_INDEX_NAME}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_ALGOLIA_API_KEY__ '\${APPSMITH_ALGOLIA_API_KEY}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_CLIENT_LOG_LEVEL__ '\${APPSMITH_CLIENT_LOG_LEVEL}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_GOOGLE_MAPS_API_KEY__ '\${APPSMITH_GOOGLE_MAPS_API_KEY}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_TNC_PP__ '\${APPSMITH_TNC_PP}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_VERSION_ID__ '\${APPSMITH_VERSION_ID}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_VERSION_RELEASE_DATE__ '\${APPSMITH_VERSION_RELEASE_DATE}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_INTERCOM_APP_ID__ '\${APPSMITH_INTERCOM_APP_ID}';
$NGINX_SSL_CMNT sub_filter __APPSMITH_MAIL_ENABLED__ '\${APPSMITH_MAIL_ENABLED}';
$NGINX_SSL_CMNT }
$NGINX_SSL_CMNT
$NGINX_SSL_CMNT location /f {
@ -121,8 +122,4 @@ $NGINX_SSL_CMNT proxy_pass http://appsmith-internal-server:8080;
$NGINX_SSL_CMNT }
$NGINX_SSL_CMNT
$NGINX_SSL_CMNT }
'
echo "$content" \
| sed -e "s/\$NGINX_SSL_CMNT/$NGINX_SSL_CMNT/g" -e "s/\$custom_domain/$custom_domain/g" \
>| nginx_app.conf
EOF