From 05bd0366d18192b9cf69db2dadc346b57324ef56 Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Tue, 17 Aug 2021 02:14:17 +0000 Subject: [PATCH 01/25] custom postgres confings for clones support + fix for dumps on S3 --- README.md | 183 +--------------------------------- dle-logical-init.sh.tpl | 36 +++++-- instance.tf | 13 +++ postgresql_clones_custom.conf | 1 + role.tf | 9 +- variables.tf | 6 +- 6 files changed, 54 insertions(+), 194 deletions(-) create mode 100644 postgresql_clones_custom.conf diff --git a/README.md b/README.md index 9069447..c1bd25c 100644 --- a/README.md +++ b/README.md @@ -1,192 +1,17 @@ [[_TOC_]] -# How to setup Database Lab using Terraform in AWS +# Database Lab Terraform Module This [Terraform Module](https://www.terraform.io/docs/language/modules/index.html) is responsible for deploying the [Database Lab Engine](https://gitlab.com/postgres-ai/database-lab) to cloud hosting providers. Your source PostgreSQL database can be located anywhere, but DLE with other components will be created on an EC2 instance under your AWS account. Currently, only "logical" mode of data retrieval (dump/restore) is supported – the only available method for managed PostgreSQL cloud services such as RDS Postgres, RDS Aurora Postgres, Azure Postgres, or Heroku. "Physical" mode is not yet supported, but it will be in the future. More about various data retrieval options for DLE: https://postgres.ai/docs/how-to-guides/administration/data. -## Supported Cloud Platforms: +## Supported Cloud Platforms - AWS -## Prerequisites -- [AWS Account](https://aws.amazon.com) -- [Terraform Installed](https://learn.hashicorp.com/tutorials/terraform/install-cli) (minimal version: 1.0.0) -- AWS [Route 53](https://aws.amazon.com/route53/) Hosted Zone (For setting up TLS) for a domain or sub-domain you control -- You must have AWS Access Keys and a default region in your Terraform environment (See section on required IAM Permissions) -- The DLE runs on an EC2 instance which can be accessed using a selected set of SSH keys uploaded to EC2. Use the Terraform parameter `aws_keypair` to specify which EC2 Keypair to use -- Required IAM Permissions: to successfully run this Terraform module, the IAM User/Role must have the following permissions: - * Read/Write permissions on EC2 - * Read/Write permissions on Route53 - * Read/Write permissions on Cloudwatch +## Installation -## Configuration overview -- :construction: Currently, it is supposed that you run `terraform` commands on a Linux machine. MacOS and Windows support is not yet implemented (but planned). -- It is recommended to clone this Git repository and adjust for your needs. Below we provide the detailed step-by-step instructions for quick start (see "Quick start") for a PoC setup -- To configure parameters used by Terraform (and the Database Lab Engine itself), you will need to modify `terraform.tfvars` and create a file with secrets (`secret.tfvars`) -- This Terraform module can be run independently or combined with any other standard Terraform module. You can learn more about using Terraform and the Terraform CLI [here](https://www.terraform.io/docs/cli/commands/index.html) -- The variables can be set in multiple ways with the following precedence order (lowest to highest): - - default values in `variables.tf` - - values defined in `terraform.tfvars` - - values passed on the command line -- All variables starting with `postgres_` represent the source database connection information for the data (from that database) to be fetched by the DLE. 
That database must be accessible from the instance hosting the DLE (that one created by Terraform) - -## How-to guide: using this Terraform module to set up DLE and its components -The following steps were tested on Ubuntu 20.04 but supposed to be valid for other Linux distributions without significant modification. - -1. SSH to any machine with internet access, it will be used as deployment machine -1. Install Terraform https://learn.hashicorp.com/tutorials/terraform/install-cli. Example for Ubuntu: - ```shell - sudo apt-get update && sudo apt-get install -y gnupg software-properties-common curl - curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add - - sudo apt-add-repository "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main" # Adjust if you have ARM platform. - sudo apt-get update && sudo apt-get install terraform - # Verify installation. - terraform -help - ``` -1. Get TF code for Database Lab: - ```shell - git clone https://gitlab.com/postgres-ai/database-lab-infrastructure.git - cd database-lab-infrastructure/ - ``` -1. Edit `terraform.tfvars` file. In our example, we will use Heroku demo database as a source: - ```config - dle_version_full = "2.4.1" - - aws_ami_name = "DBLABserver*" - aws_keypair = "YOUR_AWS_KEYPAIR" - - aws_deploy_region = "us-east-1" - aws_deploy_ebs_availability_zone = "us-east-1a" - aws_deploy_ec2_instance_type = "t2.large" - aws_deploy_ec2_instance_tag_name = "DBLABserver-ec2instance" - aws_deploy_ebs_size = "40" - aws_deploy_ebs_type = "gp2" - aws_deploy_allow_ssh_from_cidrs = ["0.0.0.0/0"] - aws_deploy_dns_api_subdomain = "tf-test" # subdomain in aws.postgres.ai, fqdn will be ${dns_api_subdomain}-engine.aws.postgres - - # Source – two options. Choose one of two: - # - direct connection to source DB - # - dump stored on AWS S3 - - # option 1 – direct PG connection - source_type = "postgres" # source is working dome postgres database - source_postgres_version = "13" - source_postgres_host = "ec2-3-215-57-87.compute-1.amazonaws.com" # an example DB at Heroku - source_postgres_port = "5432" - source_postgres_dbname = "d3dljqkrnopdvg" # an example DB at Heroku - source_postgres_username = "bfxuriuhcfpftt" # an example DB at Heroku - - # option 2 – dump on S3. Important: your AWS user has to be able to create IAM roles to work with S3 buckets in your AWS account - # source_type = 's3' # source is dump stored on demo s3 bucket - # source_pgdump_s3_bucket = "tf-demo-dump" # This is an example public bucket - # source_pgdump_path_on_s3_bucket = "heroku.dmp" # This is an example dump from demo database - - dle_debug_mode = "true" - dle_retrieval_refresh_timetable = "0 0 * * 0" - postgres_config_shared_preload_libraries = "pg_stat_statements,logerrors" # DB Migration Checker requires logerrors extension - - platform_project_name = "aws_test_tf" - ``` -1. Create `secret.tfvars` containing `source_postgres_password`, `platform_access_token`, and `vcs_github_secret_token`. 
An example: - ```config - source_postgres_password = "dfe01cbd809a71efbaecafec5311a36b439460ace161627e5973e278dfe960b7" # an example DB at Heroku - platform_access_token = "YOUR_ACCESS_TOKEN" # to generate, open https://console.postgres.ai/, choose your organization, - # then "Access tokens" in the left menu - vcs_github_secret_token = "vcs_secret_token" # generate a personal access token with scope of "repo" - ``` - To generate a personal GitHub access token with the scope of "repo", open the [guide on GitHub Docs](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) and follow the instructions. - - Note that the "repo" scope essentially gives full access to all user-controlled repositories. Should you have any concerns about which repositories the DLE can have access to, consider using a separate GitHub account that has access to the reduced number of repositories. -1. Initialize - ```shell - terraform init - ``` -1. Set environment variables with AWS credentials: - ```shell - export AWS_ACCESS_KEY_ID = "keyid" # todo: how to get it? - export AWS_SECRET_ACCESS_KEY = "accesskey" - ``` -1. Deploy: - ```shell - terraform apply -var-file="secret.tfvars" -auto-approve - ``` -1. If everything goes well, you should get an output like this: - ```config - vcs_db_migration_checker_verification_token = "gsio7KmgaxECfJ80kUx2tUeIf4kEXZex" - dle_verification_token = "zXPodd13LyQaKgVXGmSCeB8TUtnGNnIa" - ec2_public_dns = "ec2-11-111-111-11.us-east-2.compute.amazonaws.com" - ec2instance = "i-0000000000000" - ip = "11.111.111.11" - platform_joe_signing_secret = "lG23qZbUh2kq0ULIBfW6TRwKzqGZu1aP" - public_dns_name = "demo-api-engine.aws.postgres.ai" # todo: this should be URL, not hostname – further we'll need URL, with protocol – `https://` - ``` - -1. To verify result and check the progress, you might want to connect to the just-created EC2 machine using IP address or hostname from the Terraform output. In our example, it can be done using this one-liner (you can find more about DLE logs and configuration on this page: https://postgres.ai/docs/how-to-guides/administration/engine-manage): - ```shell - echo "sudo docker logs dblab_server -f" | ssh ubuntu@18.118.126.25 -i postgres_ext_test.pem - ``` - - Once you see the message like: - ``` - 2021/07/02 10:28:51 [INFO] Server started listening on :2345. - ``` - – it means that the DLE server started successfully and is waiting for you commands - - 1. Sign in to the [Postgres.ai Platform](https://console.postgres.ai/) and register your new DLE server: - 1. Go to `Database Lab > Instances` in the left menu - 1. Press the "Add instance" button - 1. `Project` – specify any name (this is how your DLE server will be named in the platform) - 1. `Verification token` – use the token generated above (`verification_token` value); do NOT press the "Generate" button here - 1. `URL` – use the value generated above // todo: not convenient, we need URL but reported was only hostname - 1. Press the "Verify URL" button to check the connectivity. Then press "Add". If everything is right, you should see the DLE page with green "OK" status: - - 1. Add Joe chatbot for efficient SQL optimization workflow: - 1. Go to the "SQL Optimization > Ask Joe" page using the left menu, click the "Add instance" button, specify the same project as you defined in the previous step - 1. `Signing secret` – use `platform_joe_signing_secret` from the Terraform output - 1. 
`URL` – use `public_dns_name` values from the Terraform output with port `444`; in our example, it's `https://demo-api-engine.aws.postgres.ai:444` - 1. Press "Verify URL" to check connectivity and then press "Add". You should see: - - - Now you can start using Joe chatbot for SQL execution plans troubleshooting and verification of optimization ideas. As a quick test, go to `SQL Optimization > Ask Joe` in the left menu, and enter `\dt+` command (a psql command to show the list of tables with sizes). You should see how Joe created a thin clone behind the scenes and immediately ran this psql command, presenting the result to you: - - -1. Set up [DB migration checker](https://postgres.ai/docs/db-migration-checker). Prepare a repository with your DB migrations(Flyway, Sqitch, Liquibase, etc.): - 1. Add secrets: - - `DLMC_CI_ENDPOINT` - an endpoint of your Database Lab Migration Checker service – use `vcs_db_migration_checker_registration_url` from the Terraform output - - `DLMC_VERIFICATION_TOKEN` - verification token for the Database Lab Migration Checker API – use `vcs_db_migration_checker_verification_token` from the Terraform output - 1. Configure a new workflow in the created repository (see an example of configuration: https://github.com/postgres-ai/green-zone/blob/master/.github/workflows/main.yml) - - add a custom action: https://github.com/marketplace/actions/database-lab-realistic-db-testing-in-ci - - provide input params for the action (the full list of available input params) - - provide environment variables: - - `DLMC_CI_ENDPOINT` - use a CI Checker endpoint from the repository secrets - - `DLMC_VERIFICATION_TOKEN` - use a verification token from the repository secrets - -1. Install and try the client CLI (`dblab`) - 1. Follow the [guide](https://postgres.ai/docs/how-to-guides/cli/cli-install-init) to install Database Lab CLI - 1. Initialize CLI: - ```shell - dblab init --environment-id= --url=https:// --token= - ``` - 1. Try it: - ```shell - dblab instance status - ``` - It should return the OK status: - ```json - { - "status": { - "code": "OK", - "message": "Instance is ready" - }, - ... - } - ``` - -## Important Note -When the DLE creates new database clones, it makes them available on incremental ports in the 6000 range (e.g. 6000, 6001, ...). The DLE CLI will also report that the clone is available on a port in the 6000 range. However, please note that these are the ports when accessing the DLE from `localhost`. This Terraform module deploys [Envoy](https://www.envoyproxy.io/) to handle SSL termination and port forwarding to connect to DLE generated clones. - -Bottom Line: When connecting to clones, add `3000` to the port number reported by the DLE CLI to connect to the clone. for example, if the CLI reports that a new clone is available at port `6001` connect that clone at port `9001`. 
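As an illustration of the note above, connecting to a clone from outside the instance might look like this. This is a minimal sketch only: the hostname is the demo value used in this guide, `<clone_user>` and `<dbname>` are placeholders for the credentials chosen when the clone was created, and TLS options depend on how the Envoy listener is configured.

```shell
# The DLE CLI reported the clone on local port 6001, so from outside the instance
# we connect through Envoy on port 9001 (6001 + 3000).
psql "host=demo-api-engine.aws.postgres.ai port=9001 user=<clone_user> dbname=<dbname>"
```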
+Follow the [how-to guide](https://postgres.ai/docs/how-to-guides/administration/install-database-lab-with-terraform) to install Database Lab with Terraform on AWS ## Known Issues ### Certificate Authority Authorization (CAA) for your Hosted Zone diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index d873b26..ea5fc6e 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -4,10 +4,10 @@ set -x sleep 20 #run certbot and copy files to envoy -# to avoid restrinctions from letsencrypt like "There were too many requests of a given type :: +# to avoid restrictions from letsencrypt like "There were too many requests of a given type :: # Error creating new order :: too many certificates (5) already issued for this exact set of domains # in the last 168 hours: demo-api-engine.aws.postgres.ai: see https://letsencrypt.org/docs/rate-limits/" -# follwing three lines were commented out and mocked up. In real implementation inline certs have to be +# following three lines were commented out and mocked up. In real implementation inline certs have to be # removed and letsencrypt generated certs should be used @@ -100,8 +100,14 @@ for i in $${!disks[@]}; do done # Adjust DLE config -mkdir ~/.dblab +mkdir -p ~/.dblab/postgres_conf/ + curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/config.example.logical_generic.yml --output ~/.dblab/server.yml +curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/postgres/pg_hba.conf \ + --output ~/.dblab/postgres_conf/pg_hba.conf +curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/postgres/postgresql.conf --output ~/.dblab/postgres_conf/postgresql.conf +cat /tmp/postgresql_clones_custom.conf >> ~/.dblab/postgres_conf/postgresql.conf + sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" ~/.dblab/server.yml sed -ri "s/^(\s*)(verificationToken:.*$)/\1verificationToken: ${dle_verification_token}/" ~/.dblab/server.yml sed -ri "s/^(\s*)(timetable:.*$)/\1timetable: \"${dle_retrieval_refresh_timetable}\"/" ~/.dblab/server.yml @@ -117,11 +123,14 @@ sed -ri "s/:13/:${source_postgres_version}/g" ~/.dblab/server.yml case "${source_type}" in postgres) + # Mount directory to store dump files. 
+ extra_mount="--volume /var/lib/dblab/dblab_pool_00/dump:/var/lib/dblab/dblab_pool/dump" + sed -ri "s/^(\s*)(host: 34.56.78.90$)/\1host: ${source_postgres_host}/" ~/.dblab/server.yml sed -ri "s/^(\s*)(port: 5432$)/\1port: ${source_postgres_port}/" ~/.dblab/server.yml sed -ri "s/^(\s*)( username: postgres$)/\1 username: ${source_postgres_username}/" ~/.dblab/server.yml sed -ri "s/^(\s*)(password:.*$)/\1password: ${source_postgres_password}/" ~/.dblab/server.yml - #restore pg_dump via pipe - without saving it on the disk + # restore pg_dump via pipe - without saving it on the disk sed -ri "s/^(\s*)(parallelJobs:.*$)/\1parallelJobs: 1/" ~/.dblab/server.yml sed -ri "s/^(\s*)(# immediateRestore:.*$)/\1immediateRestore: /" ~/.dblab/server.yml sed -ri "s/^(\s*)(# forceInit: false.*$)/\1 forceInit: true /" ~/.dblab/server.yml @@ -134,10 +143,14 @@ case "${source_type}" in s3) # Mount S3 bucket if it's defined in Terraform variables mkdir -p "${source_pgdump_s3_mount_point}" - s3fs ${source_pgdump_s3_bucket} ${source_pgdump_s3_mount_point} -o iam_role -o use_cache=/tmp -o allow_other + s3fs ${source_pgdump_s3_bucket} ${source_pgdump_s3_mount_point} -o iam_role -o allow_other + extra_mount="--volume ${source_pgdump_s3_mount_point}:${source_pgdump_s3_mount_point}" + sed -ri "s/^(\s*)(- logicalDump.*$)/\1#- logicalDump /" ~/.dblab/server.yml sed -ri "s|^(\s*)( dumpLocation:.*$)|\1 dumpLocation: ${source_pgdump_s3_mount_point}/${source_pgdump_path_on_s3_bucket}|" ~/.dblab/server.yml + sed -ri '/is always single-threaded./{n;s/.*/ parallelJobs: '${postgres_dump_parallel_jobs}'/}' ~/.dblab/server.yml + sed -ri '/jobs to restore faster./{n;s/.*/ parallelJobs: '$(getconf _NPROCESSORS_ONLN)'/}' ~/.dblab/server.yml ;; esac @@ -148,9 +161,10 @@ sudo docker run \ --privileged \ --publish 2345:2345 \ --volume /var/run/docker.sock:/var/run/docker.sock \ - --volume /var/lib/dblab/dblab_pool_00/dump:/var/lib/dblab/dblab_pool/dump \ --volume /var/lib/dblab:/var/lib/dblab/:rshared \ --volume ~/.dblab/server.yml:/home/dblab/configs/config.yml \ + --volume /root/.dblab/postgres_conf:/home/dblab/configs/postgres \ + $extra_mount \ --env DOCKER_API_VERSION=1.39 \ --detach \ --restart on-failure \ @@ -162,13 +176,15 @@ for i in {1..30000}; do sleep 10 done +curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/scripts/cli_install.sh | bash +sudo mv ~/.dblab/dblab /usr/local/bin/dblab dblab init \ --environment-id=tutorial \ --url=http://localhost:2345 \ --token=${dle_verification_token} \ --insecure -#configure and run Joe Bot container +# Configure and run Joe Bot container. cp /home/ubuntu/joe.yml ~/.dblab/joe.yml sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" ~/.dblab/joe.yml sed -ri "s/^(\s*)( token:.*$)/\1 token: ${platform_access_token}/" ~/.dblab/joe.yml @@ -186,8 +202,8 @@ sudo docker run \ --detach \ postgresai/joe:latest -#configure and run DB Migration Checker -curl https://gitlab.com/postgres-ai/database-lab/-/raw/master/configs/config.example.run_ci.yaml --output ~/.dblab/run_ci.yaml +# Configure and run DB Migration Checker. 
+curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/config.example.run_ci.yaml --output ~/.dblab/run_ci.yaml sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" ~/.dblab/run_ci.yaml sed -ri "s/^(\s*)( verificationToken: \"secret_token\".*$)/\1 verificationToken: ${vcs_db_migration_checker_verification_token}/" ~/.dblab/run_ci.yaml sed -ri "s/^(\s*)( url: \"https\\:\\/\\/dblab.domain.com\"$)/\1 url: \"http\\:\\/\\/dblab_server\\:2345\"/" ~/.dblab/run_ci.yaml @@ -200,4 +216,4 @@ sudo docker run --name dblab_ci_checker -it --detach \ --volume /var/run/docker.sock:/var/run/docker.sock \ --volume /tmp/ci_checker:/tmp/ci_checker \ --volume ~/.dblab/run_ci.yaml:/home/dblab/configs/run_ci.yaml \ -postgresai/dblab-ci-checker:2.4.1 +postgresai/dblab-ci-checker:${dle_version_full} diff --git a/instance.tf b/instance.tf index 036b2bc..4b8ddb1 100644 --- a/instance.tf +++ b/instance.tf @@ -40,6 +40,7 @@ data "template_file" "init" { source_postgres_password = "${var.source_postgres_password}" source_postgres_version = "${var.source_postgres_version}" postgres_config_shared_preload_libraries = "${var.postgres_config_shared_preload_libraries}" + postgres_dump_parallel_jobs = "${var.postgres_dump_parallel_jobs}" platform_access_token = "${var.platform_access_token}" platform_project_name = "${var.platform_project_name}" platform_joe_signing_secret = "${random_string.platform_joe_signing_secret.result}" @@ -61,4 +62,16 @@ resource "aws_instance" "aws_ec2" { tags = "${local.common_tags}" iam_instance_profile = "${var.source_type == "s3" ? "${aws_iam_instance_profile.instance_profile[0].name}" : null}" user_data = "${data.template_file.init.rendered}" + + provisioner "file" { + source = "postgresql_clones_custom.conf" + destination = "/tmp/postgresql_clones_custom.conf" + + connection { + type = "ssh" + user = "ubuntu" + private_key = "${file("ubuntu.pem")}" + host = "${self.public_dns}" + } + } } diff --git a/postgresql_clones_custom.conf b/postgresql_clones_custom.conf new file mode 100644 index 0000000..da1b437 --- /dev/null +++ b/postgresql_clones_custom.conf @@ -0,0 +1 @@ +log_min_duration_statement = 1000 diff --git a/role.tf b/role.tf index 1e419a4..5a2cd69 100644 --- a/role.tf +++ b/role.tf @@ -1,6 +1,6 @@ resource "aws_iam_role" "db_lab_engine_role" { count = "${var.source_type == "s3" ? 1 : 0}" - name = "database_lab_engine" + name_prefix = "database_lab_engine_" assume_role_policy = jsonencode({ Version = "2012-10-17" Statement = [ @@ -35,8 +35,9 @@ resource "aws_iam_role" "db_lab_engine_role" { }) } } + resource "aws_iam_instance_profile" "instance_profile" { - count = "${var.source_type == "s3" ? 1 : 0}" - name = "dle-instance-profile" - role = "${aws_iam_role.db_lab_engine_role[0].name}" + count = "${var.source_type == "s3" ? 1 : 0}" + name_prefix = "dle_instance_profile_" + role = "${aws_iam_role.db_lab_engine_role[0].name}" } diff --git a/variables.tf b/variables.tf index 22485d9..bdfa4ee 100644 --- a/variables.tf +++ b/variables.tf @@ -149,7 +149,7 @@ variable "postgres_config_shared_preload_libraries" { } variable "source_type" { - description = "Type of data source used for DLE. For now it can be postgres,S3" + description = "Type of data source used for DLE. 
For now it can be postgres or S3" default = "postgres" } @@ -168,3 +168,7 @@ variable "source_pgdump_s3_mount_point"{ default = "/s3/pg_dump" } +variable "postgres_dump_parallel_jobs"{ + description = "DLE config parallelJobs parameter value" + default = "2" +} From d81a3cee55a212dcf942ca0ea1045cdef43f0424 Mon Sep 17 00:00:00 2001 From: Nikolay Samokhvalov Date: Wed, 18 Aug 2021 21:51:18 +0000 Subject: [PATCH 02/25] Switch to GitLab Container registry --- dle-logical-init.sh.tpl | 54 ++++++++++++++++++++++------------------- 1 file changed, 29 insertions(+), 25 deletions(-) diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index ea5fc6e..82f5ae2 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -156,19 +156,19 @@ case "${source_type}" in esac sudo docker run \ - --name dblab_server \ - --label dblab_control \ - --privileged \ - --publish 2345:2345 \ - --volume /var/run/docker.sock:/var/run/docker.sock \ - --volume /var/lib/dblab:/var/lib/dblab/:rshared \ - --volume ~/.dblab/server.yml:/home/dblab/configs/config.yml \ - --volume /root/.dblab/postgres_conf:/home/dblab/configs/postgres \ - $extra_mount \ - --env DOCKER_API_VERSION=1.39 \ - --detach \ - --restart on-failure \ - postgresai/dblab-server:${dle_version_full} + --name dblab_server \ + --label dblab_control \ + --privileged \ + --publish 2345:2345 \ + --volume /var/run/docker.sock:/var/run/docker.sock \ + --volume /var/lib/dblab:/var/lib/dblab/:rshared \ + --volume ~/.dblab/server.yml:/home/dblab/configs/config.yml \ + --volume /root/.dblab/postgres_conf:/home/dblab/configs/postgres \ + $extra_mount \ + --env DOCKER_API_VERSION=1.39 \ + --detach \ + --restart on-failure \ + registry.gitlab.com/postgres-ai/database-lab/dblab-server:${dle_version_full} ### Waiting for the Database Lab Engine initialization. for i in {1..30000}; do @@ -195,12 +195,12 @@ sed -ri "s/^(\s*)(signingSecret:.*$)/\1signingSecret: ${platform_joe_signing_sec sed -ri "s/^(\s*)(project:.*$)/\1project: ${platform_project_name}/" ~/.dblab/joe.yml sudo docker run \ - --name joe_bot \ - --network=host \ - --restart=on-failure \ - --volume ~/.dblab/joe.yml:/home/config/config.yml \ - --detach \ -postgresai/joe:latest + --name joe_bot \ + --network=host \ + --restart=on-failure \ + --volume ~/.dblab/joe.yml:/home/config/config.yml \ + --detach \ + postgresai/joe:latest # Configure and run DB Migration Checker. 
curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/config.example.run_ci.yaml --output ~/.dblab/run_ci.yaml @@ -211,9 +211,13 @@ sed -ri "s/^(\s*)( verificationToken: \"checker_secret_token\".*$)/\1 verifica sed -ri "s/^(\s*)( accessToken:.*$)/\1 accessToken: ${platform_access_token}/" ~/.dblab/run_ci.yaml sed -ri "s/^(\s*)( token:.*$)/\1 token: ${vcs_github_secret_token}/" ~/.dblab/run_ci.yaml -sudo docker run --name dblab_ci_checker -it --detach \ ---publish 2500:2500 \ ---volume /var/run/docker.sock:/var/run/docker.sock \ ---volume /tmp/ci_checker:/tmp/ci_checker \ ---volume ~/.dblab/run_ci.yaml:/home/dblab/configs/run_ci.yaml \ -postgresai/dblab-ci-checker:${dle_version_full} +sudo docker run \ + --name dblab_ci_checker \ + --label dblab_control \ + --detach \ + --restart on-failure \ + --publish 2500:2500 \ + --volume /var/run/docker.sock:/var/run/docker.sock \ + --volume /tmp/ci_checker:/tmp/ci_checker \ + --volume ~/.dblab/run_ci.yaml:/home/dblab/configs/run_ci.yaml \ + registry.gitlab.com/postgres-ai/database-lab/dblab-ci-checker:${dle_version_full} From 4fd5a487adf93f30f8d761d9b3f2e313d84d2d81 Mon Sep 17 00:00:00 2001 From: Artyom Kartasov Date: Tue, 7 Sep 2021 05:18:35 +0000 Subject: [PATCH 03/25] refactor: configuration structure of Database Lab products (database-lab#278) Adapt Terraform module to configuration changes: * redefine configuration structure of Database Lab products * create required directories * fix variable names --- dle-logical-init.sh.tpl | 134 +++++++++++++++++++++++----------------- instance.tf | 3 +- terraform.tfvars | 3 +- variables.tf | 11 +++- 4 files changed, 89 insertions(+), 62 deletions(-) diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index 82f5ae2..028c606 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -100,25 +100,31 @@ for i in $${!disks[@]}; do done # Adjust DLE config -mkdir -p ~/.dblab/postgres_conf/ - -curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/config.example.logical_generic.yml --output ~/.dblab/server.yml -curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/postgres/pg_hba.conf \ - --output ~/.dblab/postgres_conf/pg_hba.conf -curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/postgres/postgresql.conf --output ~/.dblab/postgres_conf/postgresql.conf -cat /tmp/postgresql_clones_custom.conf >> ~/.dblab/postgres_conf/postgresql.conf - -sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" ~/.dblab/server.yml -sed -ri "s/^(\s*)(verificationToken:.*$)/\1verificationToken: ${dle_verification_token}/" ~/.dblab/server.yml -sed -ri "s/^(\s*)(timetable:.*$)/\1timetable: \"${dle_retrieval_refresh_timetable}\"/" ~/.dblab/server.yml -sed -ri "s/^(\s*)(forceInit:.*$)/\1forceInit: true/" ~/.dblab/server.yml -sed -ri "s/^(\s*)(dbname:.*$)/\1dbname: ${source_postgres_dbname}/" ~/.dblab/server.yml +dle_config_path="/home/ubuntu/.dblab/engine/configs" +dle_meta_path="/home/ubuntu/.dblab/engine/meta" +postgres_conf_path="/home/ubuntu/.dblab/postgres_conf" + +mkdir -p $dle_config_path +mkdir -p $dle_meta_path +mkdir -p $postgres_conf_path + +curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/configs/config.example.logical_generic.yml --output $dle_config_path/server.yml +curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/configs/standard/postgres/control/pg_hba.conf \ + --output $postgres_conf_path/pg_hba.conf +curl 
https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/configs/standard/postgres/control/postgresql.conf --output $postgres_conf_path/postgresql.conf +cat /tmp/postgresql_clones_custom.conf >> $postgres_conf_path/postgresql.conf + +sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" $dle_config_path/server.yml +sed -ri "s/^(\s*)(verificationToken:.*$)/\1verificationToken: ${dle_verification_token}/" $dle_config_path/server.yml +sed -ri "s/^(\s*)(timetable:.*$)/\1timetable: \"${dle_retrieval_refresh_timetable}\"/" $dle_config_path/server.yml +sed -ri "s/^(\s*)(forceInit:.*$)/\1forceInit: true/" $dle_config_path/server.yml +sed -ri "s/^(\s*)(dbname:.*$)/\1dbname: ${source_postgres_dbname}/" $dle_config_path/server.yml # Enable Platform -sed -ri "s/^(\s*)(#platform:$)/\1platform: /" ~/.dblab/server.yml -sed -ri "s/^(\s*)(# url: \"https\\:\\/\\/postgres.ai\\/api\\/general\"$)/\1 url: \"https\\:\\/\\/postgres.ai\\/api\\/general\" /" ~/.dblab/server.yml -sed -ri "s/^(\s*)(# accessToken: \"platform_access_token\"$)/\1 accessToken: \"${platform_access_token}\"/" ~/.dblab/server.yml -sed -ri "s/^(\s*)(# enablePersonalTokens: true$)/\1 enablePersonalTokens: true/" ~/.dblab/server.yml -sed -ri "s/:13/:${source_postgres_version}/g" ~/.dblab/server.yml +sed -ri "s/^(\s*)(#platform:$)/\1platform: /" $dle_config_path/server.yml +sed -ri "s/^(\s*)(# url: \"https\\:\\/\\/postgres.ai\\/api\\/general\"$)/\1 url: \"https\\:\\/\\/postgres.ai\\/api\\/general\" /" $dle_config_path/server.yml +sed -ri "s/^(\s*)(# accessToken: \"platform_access_token\"$)/\1 accessToken: \"${platform_access_token}\"/" $dle_config_path/server.yml +sed -ri "s/^(\s*)(# enablePersonalTokens: true$)/\1 enablePersonalTokens: true/" $dle_config_path/server.yml +sed -ri "s/:13/:${source_postgres_version}/g" $dle_config_path/server.yml case "${source_type}" in @@ -126,18 +132,18 @@ case "${source_type}" in # Mount directory to store dump files. 
extra_mount="--volume /var/lib/dblab/dblab_pool_00/dump:/var/lib/dblab/dblab_pool/dump" - sed -ri "s/^(\s*)(host: 34.56.78.90$)/\1host: ${source_postgres_host}/" ~/.dblab/server.yml - sed -ri "s/^(\s*)(port: 5432$)/\1port: ${source_postgres_port}/" ~/.dblab/server.yml - sed -ri "s/^(\s*)( username: postgres$)/\1 username: ${source_postgres_username}/" ~/.dblab/server.yml - sed -ri "s/^(\s*)(password:.*$)/\1password: ${source_postgres_password}/" ~/.dblab/server.yml + sed -ri "s/^(\s*)(host: 34.56.78.90$)/\1host: ${source_postgres_host}/" $dle_config_path/server.yml + sed -ri "s/^(\s*)(port: 5432$)/\1port: ${source_postgres_port}/" $dle_config_path/server.yml + sed -ri "s/^(\s*)( username: postgres$)/\1 username: ${source_postgres_username}/" $dle_config_path/server.yml + sed -ri "s/^(\s*)(password:.*$)/\1password: ${source_postgres_password}/" $dle_config_path/server.yml # restore pg_dump via pipe - without saving it on the disk - sed -ri "s/^(\s*)(parallelJobs:.*$)/\1parallelJobs: 1/" ~/.dblab/server.yml - sed -ri "s/^(\s*)(# immediateRestore:.*$)/\1immediateRestore: /" ~/.dblab/server.yml - sed -ri "s/^(\s*)(# forceInit: false.*$)/\1 forceInit: true /" ~/.dblab/server.yml - sed -ri "s/^(\s*)( # configs:$)/\1 configs: /" ~/.dblab/server.yml - sed -ri "s/^(\s*)( # shared_preload_libraries: .*$)/\1 shared_preload_libraries: '${postgres_config_shared_preload_libraries}'/" ~/.dblab/server.yml - sed -ri "s/^(\s*)( shared_preload_libraries:.*$)/\1 shared_preload_libraries: '${postgres_config_shared_preload_libraries}'/" ~/.dblab/server.yml - sed -ri "s/^(\s*)(- logicalRestore.*$)/\1#- logicalRestore /" ~/.dblab/server.yml + sed -ri "s/^(\s*)(parallelJobs:.*$)/\1parallelJobs: 1/" $dle_config_path/server.yml + sed -ri "s/^(\s*)(# immediateRestore:.*$)/\1immediateRestore: /" $dle_config_path/server.yml + sed -ri "s/^(\s*)(# forceInit: false.*$)/\1 forceInit: true /" $dle_config_path/server.yml + sed -ri "s/^(\s*)( # configs:$)/\1 configs: /" $dle_config_path/server.yml + sed -ri "s/^(\s*)( # shared_preload_libraries: .*$)/\1 shared_preload_libraries: '${postgres_config_shared_preload_libraries}'/" $dle_config_path/server.yml + sed -ri "s/^(\s*)( shared_preload_libraries:.*$)/\1 shared_preload_libraries: '${postgres_config_shared_preload_libraries}'/" $dle_config_path/server.yml + sed -ri "s/^(\s*)(- logicalRestore.*$)/\1#- logicalRestore /" $dle_config_path/server.yml ;; s3) @@ -146,11 +152,11 @@ case "${source_type}" in s3fs ${source_pgdump_s3_bucket} ${source_pgdump_s3_mount_point} -o iam_role -o allow_other extra_mount="--volume ${source_pgdump_s3_mount_point}:${source_pgdump_s3_mount_point}" - - sed -ri "s/^(\s*)(- logicalDump.*$)/\1#- logicalDump /" ~/.dblab/server.yml - sed -ri "s|^(\s*)( dumpLocation:.*$)|\1 dumpLocation: ${source_pgdump_s3_mount_point}/${source_pgdump_path_on_s3_bucket}|" ~/.dblab/server.yml - sed -ri '/is always single-threaded./{n;s/.*/ parallelJobs: '${postgres_dump_parallel_jobs}'/}' ~/.dblab/server.yml - sed -ri '/jobs to restore faster./{n;s/.*/ parallelJobs: '$(getconf _NPROCESSORS_ONLN)'/}' ~/.dblab/server.yml + + sed -ri "s/^(\s*)(- logicalDump.*$)/\1#- logicalDump /" $dle_config_path/server.yml + sed -ri "s|^(\s*)( dumpLocation:.*$)|\1 dumpLocation: ${source_pgdump_s3_mount_point}/${source_pgdump_path_on_s3_bucket}|" $dle_config_path/server.yml + sed -ri '/is always single-threaded./{n;s/.*/ parallelJobs: '${postgres_dump_parallel_jobs}'/}' $dle_config_path/server.yml + sed -ri '/jobs to restore faster./{n;s/.*/ parallelJobs: '$(getconf _NPROCESSORS_ONLN)'/}' 
$dle_config_path/server.yml ;; esac @@ -162,13 +168,14 @@ sudo docker run \ --publish 2345:2345 \ --volume /var/run/docker.sock:/var/run/docker.sock \ --volume /var/lib/dblab:/var/lib/dblab/:rshared \ - --volume ~/.dblab/server.yml:/home/dblab/configs/config.yml \ - --volume /root/.dblab/postgres_conf:/home/dblab/configs/postgres \ + --volume $dle_config_path:/home/dblab/configs:ro \ + --volume $dle_meta_path:/home/dblab/meta \ + --volume $postgres_conf_path:/home/dblab/standard/postgres/control \ $extra_mount \ --env DOCKER_API_VERSION=1.39 \ --detach \ --restart on-failure \ - registry.gitlab.com/postgres-ai/database-lab/dblab-server:${dle_version_full} +registry.gitlab.com/postgres-ai/database-lab/dblab-server:${dle_version} ### Waiting for the Database Lab Engine initialization. for i in {1..30000}; do @@ -176,7 +183,7 @@ for i in {1..30000}; do sleep 10 done -curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/scripts/cli_install.sh | bash +curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/scripts/cli_install.sh | bash sudo mv ~/.dblab/dblab /usr/local/bin/dblab dblab init \ --environment-id=tutorial \ @@ -185,31 +192,44 @@ dblab init \ --insecure # Configure and run Joe Bot container. -cp /home/ubuntu/joe.yml ~/.dblab/joe.yml -sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" ~/.dblab/joe.yml -sed -ri "s/^(\s*)( token:.*$)/\1 token: ${platform_access_token}/" ~/.dblab/joe.yml -sed -ri "s/^(\s*)( token:.*$)/\1 token: ${dle_verification_token}/" ~/.dblab/joe.yml -sed -ri "s/^(\s*)( url:.*$)/\1 url: \"http\\:\\/\\/localhost\\:2345\"/" ~/.dblab/joe.yml -sed -ri "s/^(\s*)(dbname:.*$)/\1dbname: ${source_postgres_dbname}/" ~/.dblab/joe.yml -sed -ri "s/^(\s*)(signingSecret:.*$)/\1signingSecret: ${platform_joe_signing_secret}/" ~/.dblab/joe.yml -sed -ri "s/^(\s*)(project:.*$)/\1project: ${platform_project_name}/" ~/.dblab/joe.yml +joe_config_path="/home/ubuntu/.dblab/joe/configs" +joe_meta_path="/home/ubuntu/.dblab/joe/meta" + +mkdir -p $joe_config_path +mkdir -p $joe_meta_path + +# Copy configuration file from Packer-baked image. +cp /home/ubuntu/joe.yml $joe_config_path/joe.yml + +sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" $joe_config_path/joe.yml +sed -ri "s/^(\s*)( token:.*$)/\1 token: ${platform_access_token}/" $joe_config_path/joe.yml +sed -ri "s/^(\s*)( token:.*$)/\1 token: ${dle_verification_token}/" $joe_config_path/joe.yml +sed -ri "s/^(\s*)( url:.*$)/\1 url: \"http\\:\\/\\/localhost\\:2345\"/" $joe_config_path/joe.yml +sed -ri "s/^(\s*)(dbname:.*$)/\1dbname: ${source_postgres_dbname}/" $joe_config_path/joe.yml +sed -ri "s/^(\s*)(signingSecret:.*$)/\1signingSecret: ${platform_joe_signing_secret}/" $joe_config_path/joe.yml +sed -ri "s/^(\s*)(project:.*$)/\1project: ${platform_project_name}/" $joe_config_path/joe.yml sudo docker run \ --name joe_bot \ --network=host \ --restart=on-failure \ - --volume ~/.dblab/joe.yml:/home/config/config.yml \ + --volume $joe_config_path:/home/configs:ro \ + --volume $joe_meta_path:/home/meta \ --detach \ - postgresai/joe:latest + registry.gitlab.com/postgres-ai/joe:${joe_version} # Configure and run DB Migration Checker. 
-curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version_full}/configs/config.example.run_ci.yaml --output ~/.dblab/run_ci.yaml -sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" ~/.dblab/run_ci.yaml -sed -ri "s/^(\s*)( verificationToken: \"secret_token\".*$)/\1 verificationToken: ${vcs_db_migration_checker_verification_token}/" ~/.dblab/run_ci.yaml -sed -ri "s/^(\s*)( url: \"https\\:\\/\\/dblab.domain.com\"$)/\1 url: \"http\\:\\/\\/dblab_server\\:2345\"/" ~/.dblab/run_ci.yaml -sed -ri "s/^(\s*)( verificationToken: \"checker_secret_token\".*$)/\1 verificationToken: ${dle_verification_token}/" ~/.dblab/run_ci.yaml -sed -ri "s/^(\s*)( accessToken:.*$)/\1 accessToken: ${platform_access_token}/" ~/.dblab/run_ci.yaml -sed -ri "s/^(\s*)( token:.*$)/\1 token: ${vcs_github_secret_token}/" ~/.dblab/run_ci.yaml +ci_checker_config_path="/home/ubuntu/.dblab/ci_checker/configs" +mkdir -p $ci_checker_config_path + +curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/configs/config.example.ci_checker.yml --output $ci_checker_config_path/ci_checker.yml + +sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" $ci_checker_config_path/ci_checker.yml +sed -ri "s/^(\s*)( verificationToken: \"secret_token\".*$)/\1 verificationToken: ${vcs_db_migration_checker_verification_token}/" $ci_checker_config_path/ci_checker.yml +sed -ri "s/^(\s*)( url: \"https\\:\\/\\/dblab.domain.com\"$)/\1 url: \"http\\:\\/\\/dblab_server\\:2345\"/" $ci_checker_config_path/ci_checker.yml +sed -ri "s/^(\s*)( verificationToken: \"checker_secret_token\".*$)/\1 verificationToken: ${dle_verification_token}/" $ci_checker_config_path/ci_checker.yml +sed -ri "s/^(\s*)( accessToken:.*$)/\1 accessToken: ${platform_access_token}/" $ci_checker_config_path/ci_checker.yml +sed -ri "s/^(\s*)( token:.*$)/\1 token: ${vcs_github_secret_token}/" $ci_checker_config_path/ci_checker.yml sudo docker run \ --name dblab_ci_checker \ @@ -219,5 +239,5 @@ sudo docker run \ --publish 2500:2500 \ --volume /var/run/docker.sock:/var/run/docker.sock \ --volume /tmp/ci_checker:/tmp/ci_checker \ - --volume ~/.dblab/run_ci.yaml:/home/dblab/configs/run_ci.yaml \ - registry.gitlab.com/postgres-ai/database-lab/dblab-ci-checker:${dle_version_full} + --volume $ci_checker_config_path:/home/dblab/configs:ro \ +registry.gitlab.com/postgres-ai/database-lab/dblab-ci-checker:${dle_version} diff --git a/instance.tf b/instance.tf index 4b8ddb1..04aa140 100644 --- a/instance.tf +++ b/instance.tf @@ -29,7 +29,8 @@ data "template_file" "init" { dle_debug_mode = "${var.dle_debug_mode}" dle_retrieval_refresh_timetable = "${var.dle_retrieval_refresh_timetable}" dle_disks = "${join(" ",var.aws_deploy_ec2_volumes_names)}" - dle_version_full = "${var.dle_version_full}" + dle_version = "${var.dle_version}" + joe_version = "${var.joe_version}" aws_deploy_dns_zone_name = "${var.aws_deploy_dns_zone_name}" aws_deploy_dns_api_subdomain = "${var.aws_deploy_dns_api_subdomain}" aws_deploy_certificate_email = "${var.aws_deploy_certificate_email}" diff --git a/terraform.tfvars b/terraform.tfvars index f7aa1da..23ff833 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -1,4 +1,5 @@ -dle_version_full = "2.4.1" +dle_version = "2.5.0" +joe_version = "0.10.0" aws_ami_name = "DBLABserver*" aws_keypair = "YOUR_AWS_KEYPAIR" diff --git a/variables.tf b/variables.tf index bdfa4ee..6623a9f 100644 --- a/variables.tf +++ b/variables.tf @@ -3,9 +3,14 @@ variable "aws_ami_name" { default = "DBLABserver" } -variable "dle_version_full" { - description = "3-digit DLE 
version (2-digit major + minor)" - default = "2.4.1" +variable "dle_version" { + description = "Semantic DLE version (3-digits: major, minor, patch)" + default = "2.5.0" +} + +variable "joe_version" { + description = "Semantic Joe Bot version (3-digits: major, minor, patch)" + default = "0.10.0" } variable "aws_deploy_region" { From cc1765e52f3d6d0f0e462b8cf2dd26ea62e2712a Mon Sep 17 00:00:00 2001 From: Artyom Kartasov Date: Wed, 8 Sep 2021 09:19:47 +0000 Subject: [PATCH 04/25] * fix missing option to enable immediate restore * add default key to the .gitignore file --- .gitignore | 1 + dle-logical-init.sh.tpl | 1 + 2 files changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 064695e..242deb0 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,5 @@ example.com.key postgres.example.com.csr test.pem secret.tfvars +ubuntu.pem diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index 028c606..6a0ea8a 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -139,6 +139,7 @@ case "${source_type}" in # restore pg_dump via pipe - without saving it on the disk sed -ri "s/^(\s*)(parallelJobs:.*$)/\1parallelJobs: 1/" $dle_config_path/server.yml sed -ri "s/^(\s*)(# immediateRestore:.*$)/\1immediateRestore: /" $dle_config_path/server.yml + sed -ri "s/^(\s*)(# enabled: true.*$)/\1 enabled: true /" $dle_config_path/server.yml sed -ri "s/^(\s*)(# forceInit: false.*$)/\1 forceInit: true /" $dle_config_path/server.yml sed -ri "s/^(\s*)( # configs:$)/\1 configs: /" $dle_config_path/server.yml sed -ri "s/^(\s*)( # shared_preload_libraries: .*$)/\1 shared_preload_libraries: '${postgres_config_shared_preload_libraries}'/" $dle_config_path/server.yml From aaeb4242a4c894fbfec1e74c4c92c7858ba20fc6 Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Thu, 9 Sep 2021 19:17:53 +0000 Subject: [PATCH 05/25] DLE AMI search criteria fix --- role.tf | 2 +- variables.tf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/role.tf b/role.tf index 5a2cd69..39a7825 100644 --- a/role.tf +++ b/role.tf @@ -27,7 +27,7 @@ resource "aws_iam_role" "db_lab_engine_role" { Resource = "arn:aws:s3:::${var.source_pgdump_s3_bucket}" }, { - Action = ["s3:GetObject"] + Action = ["s3:GetObject","s3:GetObjectAcl"] Effect = "Allow" Resource = "arn:aws:s3:::${var.source_pgdump_s3_bucket}/*" # Grant read access to entire bucket } diff --git a/variables.tf b/variables.tf index 6623a9f..bd36bd4 100644 --- a/variables.tf +++ b/variables.tf @@ -20,7 +20,7 @@ variable "aws_deploy_region" { variable "aws_ami_owner" { description = "Filter for the AMI owner" - default = "self" + default = "005923036815" # Postgres.ai account publishes public AMI for DLE } variable "aws_deploy_ec2_instance_type" { description = "Type of EC2 instance" From 7b5270643d593aa99f8df69a76eed1763fbc45a0 Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Tue, 14 Sep 2021 15:21:14 +0000 Subject: [PATCH 06/25] initial commit --- instance.tf | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/instance.tf b/instance.tf index 04aa140..e6687fd 100644 --- a/instance.tf +++ b/instance.tf @@ -22,9 +22,15 @@ resource "random_string" "vcs_db_migration_checker_verification_token" { special = false } -data "template_file" "init" { - template = "${file("dle-logical-init.sh.tpl")}" - vars = { +resource "aws_instance" "aws_ec2" { + ami = "${data.aws_ami.ami.id}" + availability_zone = "${var.aws_deploy_ebs_availability_zone}" + instance_type = "${var.aws_deploy_ec2_instance_type}" + security_groups = 
["${aws_security_group.dle_instance_sg.name}"] + key_name = "${var.aws_keypair}" + tags = "${local.common_tags}" + iam_instance_profile = "${var.source_type == "s3" ? "${aws_iam_instance_profile.instance_profile[0].name}" : null}" + user_data = templatefile("${path.module}/dle-logical-init.sh.tpl",{ dle_verification_token = "${random_string.dle_verification_token.result}" dle_debug_mode = "${var.dle_debug_mode}" dle_retrieval_refresh_timetable = "${var.dle_retrieval_refresh_timetable}" @@ -51,19 +57,7 @@ data "template_file" "init" { source_pgdump_s3_bucket = "${var.source_pgdump_s3_bucket}" source_pgdump_s3_mount_point = "${var.source_pgdump_s3_mount_point}" source_pgdump_path_on_s3_bucket = "${var.source_pgdump_path_on_s3_bucket}" - } -} - -resource "aws_instance" "aws_ec2" { - ami = "${data.aws_ami.ami.id}" - availability_zone = "${var.aws_deploy_ebs_availability_zone}" - instance_type = "${var.aws_deploy_ec2_instance_type}" - security_groups = ["${aws_security_group.dle_instance_sg.name}"] - key_name = "${var.aws_keypair}" - tags = "${local.common_tags}" - iam_instance_profile = "${var.source_type == "s3" ? "${aws_iam_instance_profile.instance_profile[0].name}" : null}" - user_data = "${data.template_file.init.rendered}" - + }) provisioner "file" { source = "postgresql_clones_custom.conf" destination = "/tmp/postgresql_clones_custom.conf" From f8d672557d834350675374c389c73379b1fc9012 Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Tue, 14 Sep 2021 17:14:09 +0000 Subject: [PATCH 07/25] Update README.md --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index c1bd25c..f18c7e2 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,3 @@ -[[_TOC_]] - # Database Lab Terraform Module This [Terraform Module](https://www.terraform.io/docs/language/modules/index.html) is responsible for deploying the [Database Lab Engine](https://gitlab.com/postgres-ai/database-lab) to cloud hosting providers. 
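The module description above boils down to a short workflow. Here is a minimal sketch assembled only from commands that appear elsewhere in this series; it assumes AWS credentials are exported and `terraform.tfvars`/`secret.tfvars` are filled in as shown in the quick start, and the `destroy` flags simply mirror the `apply` call.

```shell
# Deploy the Database Lab environment described by this module.
git clone https://gitlab.com/postgres-ai/database-lab-infrastructure.git
cd database-lab-infrastructure/
terraform init
terraform apply -var-file="secret.tfvars" -auto-approve

# Tear the environment down when it is no longer needed
# (see "Reminder to run terraform destroy" later in this series):
terraform destroy -var-file="secret.tfvars" -auto-approve
```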
From d3f01cc87bc199d66e1385f188a5a00902da4775 Mon Sep 17 00:00:00 2001 From: Artyom Kartasov Date: Tue, 16 Nov 2021 06:10:19 +0000 Subject: [PATCH 08/25] feat: replace sed instructions with the yq processor commands to make configuration adjustment more reliable in the TF-template --- dle-logical-init.sh.tpl | 102 ++++++++++++++++++++++++---------------- 1 file changed, 61 insertions(+), 41 deletions(-) diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index 6a0ea8a..ef91475 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -114,17 +114,21 @@ curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/configs/st curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/configs/standard/postgres/control/postgresql.conf --output $postgres_conf_path/postgresql.conf cat /tmp/postgresql_clones_custom.conf >> $postgres_conf_path/postgresql.conf -sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" $dle_config_path/server.yml -sed -ri "s/^(\s*)(verificationToken:.*$)/\1verificationToken: ${dle_verification_token}/" $dle_config_path/server.yml -sed -ri "s/^(\s*)(timetable:.*$)/\1timetable: \"${dle_retrieval_refresh_timetable}\"/" $dle_config_path/server.yml -sed -ri "s/^(\s*)(forceInit:.*$)/\1forceInit: true/" $dle_config_path/server.yml -sed -ri "s/^(\s*)(dbname:.*$)/\1dbname: ${source_postgres_dbname}/" $dle_config_path/server.yml +yq e -i ' + .global.debug=${dle_debug_mode} | + .server.verificationToken="${dle_verification_token}" | + .retrieval.refresh.timetable="${dle_retrieval_refresh_timetable}" | + .retrieval.spec.logicalDump.options.source.connection.dbname="${source_postgres_dbname}" | + .retrieval.spec.logicalRestore.options.forceInit=true | + .databaseContainer.dockerImage="postgresai/extended-postgres:${source_postgres_version}" +' $dle_config_path/server.yml + # Enable Platform -sed -ri "s/^(\s*)(#platform:$)/\1platform: /" $dle_config_path/server.yml -sed -ri "s/^(\s*)(# url: \"https\\:\\/\\/postgres.ai\\/api\\/general\"$)/\1 url: \"https\\:\\/\\/postgres.ai\\/api\\/general\" /" $dle_config_path/server.yml -sed -ri "s/^(\s*)(# accessToken: \"platform_access_token\"$)/\1 accessToken: \"${platform_access_token}\"/" $dle_config_path/server.yml -sed -ri "s/^(\s*)(# enablePersonalTokens: true$)/\1 enablePersonalTokens: true/" $dle_config_path/server.yml -sed -ri "s/:13/:${source_postgres_version}/g" $dle_config_path/server.yml +yq e -i ' + .platform.url = "https://postgres.ai/api/general" | + .platform.accessToken = "${platform_access_token}" | + .platform.enablePersonalTokens = true | +' $dle_config_path/server.yml case "${source_type}" in @@ -132,19 +136,23 @@ case "${source_type}" in # Mount directory to store dump files. 
extra_mount="--volume /var/lib/dblab/dblab_pool_00/dump:/var/lib/dblab/dblab_pool/dump" - sed -ri "s/^(\s*)(host: 34.56.78.90$)/\1host: ${source_postgres_host}/" $dle_config_path/server.yml - sed -ri "s/^(\s*)(port: 5432$)/\1port: ${source_postgres_port}/" $dle_config_path/server.yml - sed -ri "s/^(\s*)( username: postgres$)/\1 username: ${source_postgres_username}/" $dle_config_path/server.yml - sed -ri "s/^(\s*)(password:.*$)/\1password: ${source_postgres_password}/" $dle_config_path/server.yml + yq e -i ' + .retrieval.spec.logicalDump.options.source.connection.host = ${source_postgres_host}" | + .retrieval.spec.logicalDump.options.source.connection.port = ${source_postgres_port}" | + .retrieval.spec.logicalDump.options.source.connection.username = ${source_postgres_username}" | + .retrieval.spec.logicalDump.options.source.connection.password = ${source_postgres_password}" | + .retrieval.spec.logicalDump.options.parallelJobs = 1" + ' $dle_config_path/server.yml + # restore pg_dump via pipe - without saving it on the disk - sed -ri "s/^(\s*)(parallelJobs:.*$)/\1parallelJobs: 1/" $dle_config_path/server.yml - sed -ri "s/^(\s*)(# immediateRestore:.*$)/\1immediateRestore: /" $dle_config_path/server.yml - sed -ri "s/^(\s*)(# enabled: true.*$)/\1 enabled: true /" $dle_config_path/server.yml - sed -ri "s/^(\s*)(# forceInit: false.*$)/\1 forceInit: true /" $dle_config_path/server.yml - sed -ri "s/^(\s*)( # configs:$)/\1 configs: /" $dle_config_path/server.yml - sed -ri "s/^(\s*)( # shared_preload_libraries: .*$)/\1 shared_preload_libraries: '${postgres_config_shared_preload_libraries}'/" $dle_config_path/server.yml - sed -ri "s/^(\s*)( shared_preload_libraries:.*$)/\1 shared_preload_libraries: '${postgres_config_shared_preload_libraries}'/" $dle_config_path/server.yml - sed -ri "s/^(\s*)(- logicalRestore.*$)/\1#- logicalRestore /" $dle_config_path/server.yml + yq e -i ' + .databaseContainer.dockerImage="postgresai/extended-postgres:${source_postgres_version}" | + .retrieval.spec.logicalDump.options.immediateRestore.enabled=true | + .retrieval.spec.logicalDump.options.immediateRestore.forceInit=true | + .retrieval.spec.logicalDump.options.immediateRestore.configs alias = .databaseConfig | + del(.retrieval.jobs[] | select(. == "logicalRestore")) | + .databaseConfig.configs.shared_preload_libraries = ${postgres_config_shared_preload_libraries} + ' $dle_config_path/server.yml ;; s3) @@ -154,10 +162,16 @@ case "${source_type}" in extra_mount="--volume ${source_pgdump_s3_mount_point}:${source_pgdump_s3_mount_point}" - sed -ri "s/^(\s*)(- logicalDump.*$)/\1#- logicalDump /" $dle_config_path/server.yml - sed -ri "s|^(\s*)( dumpLocation:.*$)|\1 dumpLocation: ${source_pgdump_s3_mount_point}/${source_pgdump_path_on_s3_bucket}|" $dle_config_path/server.yml - sed -ri '/is always single-threaded./{n;s/.*/ parallelJobs: '${postgres_dump_parallel_jobs}'/}' $dle_config_path/server.yml - sed -ri '/jobs to restore faster./{n;s/.*/ parallelJobs: '$(getconf _NPROCESSORS_ONLN)'/}' $dle_config_path/server.yml + yq e -i ' + del(.retrieval.jobs[] | select(. 
== "logicalDump")) | + .retrieval.spec.logicalRestore.options.dumpLocation="${source_pgdump_s3_mount_point}/${source_pgdump_path_on_s3_bucket}" + ' $dle_config_path/server.yml + + nProcessors = $(getconf _NPROCESSORS_ONLN) + yq e -i ' + .retrieval.spec.logicalDump.options.parallelJobs=${postgres_dump_parallel_jobs} | + .retrieval.spec.logicalRestore.options.parallelJobs=$nProcessors + ' $dle_config_path/server.yml ;; esac @@ -199,16 +213,20 @@ joe_meta_path="/home/ubuntu/.dblab/joe/meta" mkdir -p $joe_config_path mkdir -p $joe_meta_path -# Copy configuration file from Packer-baked image. -cp /home/ubuntu/joe.yml $joe_config_path/joe.yml +curl https://gitlab.com/postgres-ai/joe/-/raw/${joe_version}/configs/config.example.yml --output $joe_config_path/joe.yml -sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" $joe_config_path/joe.yml -sed -ri "s/^(\s*)( token:.*$)/\1 token: ${platform_access_token}/" $joe_config_path/joe.yml -sed -ri "s/^(\s*)( token:.*$)/\1 token: ${dle_verification_token}/" $joe_config_path/joe.yml -sed -ri "s/^(\s*)( url:.*$)/\1 url: \"http\\:\\/\\/localhost\\:2345\"/" $joe_config_path/joe.yml -sed -ri "s/^(\s*)(dbname:.*$)/\1dbname: ${source_postgres_dbname}/" $joe_config_path/joe.yml -sed -ri "s/^(\s*)(signingSecret:.*$)/\1signingSecret: ${platform_joe_signing_secret}/" $joe_config_path/joe.yml -sed -ri "s/^(\s*)(project:.*$)/\1project: ${platform_project_name}/" $joe_config_path/joe.yml +yq e -i ' + .app.debug = ${dle_debug_mode} | + .platform.token = "${platform_access_token}" | + .channelMapping.dblabServers.prod1.token = "${dle_verification_token}" | + .channelMapping.dblabServers.prod1.url = "http://localhost:2345/" | + .channelMapping.communicationTypes.webui[0].credentials.signingSecret = "${platform_joe_signing_secret}" | + .channelMapping.communicationTypes.webui[0].channels[0].project = "${platform_project_name}" | + .channelMapping.communicationTypes.webui[0].channels[0].dblabParams.dbname = "${source_postgres_dbname}" | + del(.channelMapping.communicationTypes.slack) | + del(.channelMapping.communicationTypes.slackrtm) | + del(.channelMapping.communicationTypes.slacksm) +' $joe_config_path/joe.yml sudo docker run \ --name joe_bot \ @@ -225,12 +243,14 @@ mkdir -p $ci_checker_config_path curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/configs/config.example.ci_checker.yml --output $ci_checker_config_path/ci_checker.yml -sed -ri "s/^(\s*)(debug:.*$)/\1debug: ${dle_debug_mode}/" $ci_checker_config_path/ci_checker.yml -sed -ri "s/^(\s*)( verificationToken: \"secret_token\".*$)/\1 verificationToken: ${vcs_db_migration_checker_verification_token}/" $ci_checker_config_path/ci_checker.yml -sed -ri "s/^(\s*)( url: \"https\\:\\/\\/dblab.domain.com\"$)/\1 url: \"http\\:\\/\\/dblab_server\\:2345\"/" $ci_checker_config_path/ci_checker.yml -sed -ri "s/^(\s*)( verificationToken: \"checker_secret_token\".*$)/\1 verificationToken: ${dle_verification_token}/" $ci_checker_config_path/ci_checker.yml -sed -ri "s/^(\s*)( accessToken:.*$)/\1 accessToken: ${platform_access_token}/" $ci_checker_config_path/ci_checker.yml -sed -ri "s/^(\s*)( token:.*$)/\1 token: ${vcs_github_secret_token}/" $ci_checker_config_path/ci_checker.yml +yq e -i ' + .app.debug = ${dle_debug_mode} | + .app.verificationToken = "${vcs_db_migration_checker_verification_token}" | + .dle.url = "http://dblab_server:2345/" | + .dle.verificationToken = "${dle_verification_token}" | + .platform.accessToken = "${platform_access_token}" | + .source.token = 
"${vcs_github_secret_token}" +' $ci_checker_config_path/ci_checker.yml sudo docker run \ --name dblab_ci_checker \ From 9f6ffde461b2d2d13260311178a3f206d5a9f375 Mon Sep 17 00:00:00 2001 From: Artyom Kartasov Date: Fri, 19 Nov 2021 12:09:23 +0000 Subject: [PATCH 09/25] feat: open port to provide access to a local UI container (database-lab#302) --- dle-logical-init.sh.tpl | 12 ++++++------ outputs.tf | 3 +++ security.tf | 27 +++++++-------------------- 3 files changed, 16 insertions(+), 26 deletions(-) diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index ef91475..957d18e 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -137,11 +137,11 @@ case "${source_type}" in extra_mount="--volume /var/lib/dblab/dblab_pool_00/dump:/var/lib/dblab/dblab_pool/dump" yq e -i ' - .retrieval.spec.logicalDump.options.source.connection.host = ${source_postgres_host}" | - .retrieval.spec.logicalDump.options.source.connection.port = ${source_postgres_port}" | - .retrieval.spec.logicalDump.options.source.connection.username = ${source_postgres_username}" | - .retrieval.spec.logicalDump.options.source.connection.password = ${source_postgres_password}" | - .retrieval.spec.logicalDump.options.parallelJobs = 1" + .retrieval.spec.logicalDump.options.source.connection.host = "${source_postgres_host}" | + .retrieval.spec.logicalDump.options.source.connection.port = ${source_postgres_port} | + .retrieval.spec.logicalDump.options.source.connection.username = "${source_postgres_username}" | + .retrieval.spec.logicalDump.options.source.connection.password = "${source_postgres_password}" | + .retrieval.spec.logicalDump.options.parallelJobs = 1 ' $dle_config_path/server.yml # restore pg_dump via pipe - without saving it on the disk @@ -151,7 +151,7 @@ case "${source_type}" in .retrieval.spec.logicalDump.options.immediateRestore.forceInit=true | .retrieval.spec.logicalDump.options.immediateRestore.configs alias = .databaseConfig | del(.retrieval.jobs[] | select(. 
== "logicalRestore")) | - .databaseConfig.configs.shared_preload_libraries = ${postgres_config_shared_preload_libraries} + .databaseConfigs.configs.shared_preload_libraries = "${postgres_config_shared_preload_libraries}" ' $dle_config_path/server.yml ;; diff --git a/outputs.tf b/outputs.tf index 73c7edb..7ebc6fa 100644 --- a/outputs.tf +++ b/outputs.tf @@ -25,3 +25,6 @@ output "vcs_db_migration_checker_verification_token" { output "vcs_db_migration_checker_registration_url" { value = "${format("%s://%s:%s", "https",join("", aws_route53_record.dblab_subdomain.*.fqdn),"445")}" } +output "local_ui_url" { + value = "${format("%s://%s:%s", "https",join("", aws_route53_record.dblab_subdomain.*.fqdn),"446")}" +} diff --git a/security.tf b/security.tf index ad97b3b..c3dd56c 100644 --- a/security.tf +++ b/security.tf @@ -12,20 +12,16 @@ resource "aws_security_group_rule" "dle_instance_ssh" { cidr_blocks = "${var.aws_deploy_allow_ssh_from_cidrs}" } -resource "aws_security_group_rule" "dle_instance_api" { +resource "aws_security_group_rule" "dle_products" { + # Ports: + # 443 - dle_instance_api + # 444 - joe_bot_api + # 445 - ci_observer_api + # 446 - dle_local_ui security_group_id = aws_security_group.dle_instance_sg.id type = "ingress" from_port = 443 - to_port = 443 - protocol = "tcp" - cidr_blocks = "${var.aws_deploy_allow_api_from_cidrs}" -} - -resource "aws_security_group_rule" "joe_bot_api" { - security_group_id = aws_security_group.dle_instance_sg.id - type = "ingress" - from_port = 444 - to_port = 444 + to_port = 446 protocol = "tcp" cidr_blocks = "${var.aws_deploy_allow_api_from_cidrs}" } @@ -39,15 +35,6 @@ resource "aws_security_group_rule" "dle_instance_http_cert_auth" { cidr_blocks = ["0.0.0.0/0"] } -resource "aws_security_group_rule" "ci_observer_api" { - security_group_id = aws_security_group.dle_instance_sg.id - type = "ingress" - from_port = 445 - to_port = 445 - protocol = "tcp" - cidr_blocks = "${var.aws_deploy_allow_api_from_cidrs}" -} - resource "aws_security_group_rule" "dle_instance_clones" { security_group_id = aws_security_group.dle_instance_sg.id type = "ingress" From 080c213d0f7262bcb4d8713b8d5386004bdc740b Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Wed, 8 Dec 2021 14:13:53 +0000 Subject: [PATCH 10/25] self generated sshkey to provision EC2 --- instance.tf | 44 ++++++++++++++++++++++++++++++++++++++++++-- variables.tf | 10 ++++++++++ 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/instance.tf b/instance.tf index e6687fd..5ff7bd1 100644 --- a/instance.tf +++ b/instance.tf @@ -1,3 +1,18 @@ +locals { + public_key = file("~/.ssh/id_rsa.pub") + } + +resource "tls_private_key" "ssh_key" { + algorithm = "RSA" + rsa_bits = 4096 +} + +resource "aws_key_pair" "provision_key" { + key_name = "${var.aws_deploy_ec2_instance_tag_name}" + public_key = tls_private_key.ssh_key.public_key_openssh +} + + resource "random_string" "dle_verification_token" { length = 32 upper = true @@ -27,7 +42,7 @@ resource "aws_instance" "aws_ec2" { availability_zone = "${var.aws_deploy_ebs_availability_zone}" instance_type = "${var.aws_deploy_ec2_instance_type}" security_groups = ["${aws_security_group.dle_instance_sg.name}"] - key_name = "${var.aws_keypair}" + key_name = aws_key_pair.provision_key.key_name tags = "${local.common_tags}" iam_instance_profile = "${var.source_type == "s3" ? 
"${aws_iam_instance_profile.instance_profile[0].name}" : null}" user_data = templatefile("${path.module}/dle-logical-init.sh.tpl",{ @@ -58,6 +73,31 @@ resource "aws_instance" "aws_ec2" { source_pgdump_s3_mount_point = "${var.source_pgdump_s3_mount_point}" source_pgdump_path_on_s3_bucket = "${var.source_pgdump_path_on_s3_bucket}" }) + + provisioner "local-exec" { # save private key locally + command = "echo '${tls_private_key.ssh_key.private_key_pem}' > ./${var.aws_deploy_ec2_instance_tag_name}.pem" + } + provisioner "local-exec" { + command = "chmod 600 ./'${var.aws_deploy_ec2_instance_tag_name}'.pem" + } + + provisioner "remote-exec" { + connection { + type = "ssh" + user = "ubuntu" + private_key = "${tls_private_key.ssh_key.private_key_pem}" + host = "${self.public_dns}" + } + inline = [ + "echo 'ssh is ready, will copy ssh public keys'", + "echo '${join("\n", var.ssh_public_keys_list)}' >> ~/.ssh/authorized_keys" + ] + } + + provisioner "local-exec" { + command = "for ssh_key in ${join(" ", var.ssh_public_keys_files_list)}; do cat $ssh_key | ssh -i ./${var.aws_deploy_ec2_instance_tag_name}.pem ubuntu@${self.public_dns} -o StrictHostKeyChecking=no 'cat >> ~/.ssh/authorized_keys'; done" + } + provisioner "file" { source = "postgresql_clones_custom.conf" destination = "/tmp/postgresql_clones_custom.conf" @@ -65,7 +105,7 @@ resource "aws_instance" "aws_ec2" { connection { type = "ssh" user = "ubuntu" - private_key = "${file("ubuntu.pem")}" + private_key = "${tls_private_key.ssh_key.private_key_pem}" host = "${self.public_dns}" } } diff --git a/variables.tf b/variables.tf index bd36bd4..62fb669 100644 --- a/variables.tf +++ b/variables.tf @@ -177,3 +177,13 @@ variable "postgres_dump_parallel_jobs"{ description = "DLE config parallelJobs parameter value" default = "2" } + +variable "ssh_public_keys_files_list"{ + description = "List of files with ssh public key to copy to the provisioned instance with DLE" + default = [] +} + +variable "ssh_public_keys_list"{ + description = "List of ssh public keys to copy to the provisioned instance with DLE" + default = [] +} From 141eb789a580ff4ec0dab8fabe03f43911ffd760 Mon Sep 17 00:00:00 2001 From: Nikolay Samokhvalov Date: Wed, 8 Dec 2021 14:15:10 +0000 Subject: [PATCH 11/25] Mention password to be used for example DB --- terraform.tfvars | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/terraform.tfvars b/terraform.tfvars index 23ff833..e703575 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -17,7 +17,7 @@ source_postgres_version = "13" source_postgres_host = "ec2-3-215-57-87.compute-1.amazonaws.com" source_postgres_port = "5432" source_postgres_dbname = "d3dljqkrnopdvg" # this is an existing DB (Heroku example DB) -source_postgres_username = "postgres" +source_postgres_username = "bfxuriuhcfpftt" # in secret.tfvars, use: source_postgres_password = "dfe01cbd809a71efbaecafec5311a36b439460ace161627e5973e278dfe960b7" dle_debug_mode = "true" dle_retrieval_refresh_timetable = "0 0 * * 0" From 29551dad1dd867670a4272cd5bfc0621e507be4f Mon Sep 17 00:00:00 2001 From: Nikolay Samokhvalov Date: Wed, 8 Dec 2021 14:18:56 +0000 Subject: [PATCH 12/25] Reminder to run terraform destroy --- README.md | 167 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 166 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f18c7e2..78da195 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,172 @@ Your source PostgreSQL database can be located anywhere, but DLE with other comp ## Supported Cloud Platforms - AWS -## 
Installation +## Prerequisites +- [AWS Account](https://aws.amazon.com) +- [Terraform Installed](https://learn.hashicorp.com/tutorials/terraform/install-cli) (minimal version: 1.0.0) +- AWS [Route 53](https://aws.amazon.com/route53/) Hosted Zone (For setting up TLS) for a domain or sub-domain you control +- You must have AWS Access Keys and a default region in your Terraform environment (See section on required IAM Permissions) +- The DLE runs on an EC2 instance which can be accessed using a selected set of SSH keys uploaded to EC2. Use the Terraform parameter `aws_keypair` to specify which EC2 Keypair to use +- Required IAM Permissions: to successfully run this Terraform module, the IAM User/Role must have the following permissions: + * Read/Write permissions on EC2 + * Read/Write permissions on Route53 + * Read/Write permissions on Cloudwatch + +## How to use +- :construction: Currently, it is supposed that you run `terraform` commands on a Linux machine. MacOS and Windows support is not yet implemented (but planned). +- It is recommended to clone this Git repository and adjust for your needs. Below we provide the detailed step-by-step instructions for quick start (see "Quick start") for a PoC setup +- To configure parameters used by Terraform (and the Database Lab Engine itself), you will need to modify `terraform.tfvars` and create a file with secrets (`secret.tfvars`) +- This Terraform module can be run independently or combined with any other standard Terraform module. You can learn more about using Terraform and the Terraform CLI [here](https://www.terraform.io/docs/cli/commands/index.html) +- The variables can be set in multiple ways with the following precedence order (lowest to highest): + - default values in `variables.tf` + - values defined in `terraform.tfvars` + - values passed on the command line +- All variables starting with `postgres_` represent the source database connection information for the data (from that database) to be fetched by the DLE. That database must be accessible from the instance hosting the DLE (that one created by Terraform) + +## Quick start +The following steps were tested on Ubuntu 20.04 but supposed to be valid for other Linux distributions without significant modification. + +1. SSH to any machine with internet access, it will be used as deployment machine +1. Install Terraform https://learn.hashicorp.com/tutorials/terraform/install-cli. Example for Ubuntu: + ```shell + sudo apt-get update && sudo apt-get install -y gnupg software-properties-common curl + curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add - + sudo apt-add-repository "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main" # Adjust if you have ARM platform. + sudo apt-get update && sudo apt-get install terraform + # Verify installation. + terraform -help + ``` +1. Get TF code for Database Lab: + ```shell + git clone https://gitlab.com/postgres-ai/database-lab-infrastructure.git + cd database-lab-infrastructure/ + ``` +1. Edit `terraform.tfvars` file. 
In our example, we will use Heroku demo database as a source: + ```config + dle_version_full = "2.4.1" + + aws_ami_name = "DBLABserver*" + aws_keypair = "YOUR_AWS_KEYPAIR" + + aws_deploy_region = "us-east-1" + aws_deploy_ebs_availability_zone = "us-east-1a" + aws_deploy_ec2_instance_type = "t2.large" + aws_deploy_ec2_instance_tag_name = "DBLABserver-ec2instance" + aws_deploy_ebs_size = "40" + aws_deploy_ebs_type = "gp2" + aws_deploy_allow_ssh_from_cidrs = ["0.0.0.0/0"] + aws_deploy_dns_api_subdomain = "tf-test" # subdomain in aws.postgres.ai, fqdn will be ${dns_api_subdomain}-engine.aws.postgres + + source_postgres_version = "13" + source_postgres_host = "ec2-3-215-57-87.compute-1.amazonaws.com" + source_postgres_port = "5432" + source_postgres_dbname = "d3dljqkrnopdvg" # this is an existing DB (Heroku example DB) + source_postgres_username = "postgres" + + dle_debug_mode = "true" + dle_retrieval_refresh_timetable = "0 0 * * 0" + postgres_config_shared_preload_libraries = "pg_stat_statements,logerrors" # DB Migration Checker requires logerrors extension + + platform_project_name = "aws_test_tf" + ``` +1. Create `secret.tfvars` containing `source_postgres_password`, `platform_access_token`, and `vcs_github_secret_token`. An example: + ```config + source_postgres_password = "YOUR_DB_PASSWORD" # todo: put pwd for heroku example DB here + platform_access_token = "YOUR_ACCESS_TOKEN" # to generate, open https://console.postgres.ai/, choose your organization, + # then "Access tokens" in the left menu + vcs_github_secret_token = "vcs_secret_token" # to generate, open https://github.com/settings/tokens/new + ``` +1. Initialize + ```shell + terraform init + ``` +1. Set environment variables with AWS credentials: + ```shell + export AWS_ACCESS_KEY_ID = "keyid" # todo: how to get it? + export AWS_SECRET_ACCESS_KEY = "accesskey" + ``` +1. Deploy: + ``` + terraform apply -var-file="secret.tfvars" -auto-approve + ``` +1. If everything goes well, you should get an output like this: + ```config + vcs_db_migration_checker_verification_token = "gsio7KmgaxECfJ80kUx2tUeIf4kEXZex" + dle_verification_token = "zXPodd13LyQaKgVXGmSCeB8TUtnGNnIa" + ec2_public_dns = "ec2-18-118-126-25.us-east-2.compute.amazonaws.com" + ec2instance = "i-0b07738148950af25" + ip = "18.118.126.25" + platform_joe_signing_secret = "lG23qZbUh2kq0ULIBfW6TRwKzqGZu1aP" + public_dns_name = "demo-api-engine.aws.postgres.ai" # todo: this should be URL, not hostname – further we'll need URL, with protocol – `https://` + ``` + +1. To verify result and check the progress, you might want to connect to the just-created EC2 machine using IP address or hostname from the Terraform output. In our example, it can be done using this one-liner (you can find more about DLE logs and configuration on this page: https://postgres.ai/docs/how-to-guides/administration/engine-manage): + ```shell + echo "sudo docker logs dblab_server -f" | ssh ubuntu@18.118.126.25 -i postgres_ext_test.pem + ``` + + Once you see the message like: + ``` + 2021/07/02 10:28:51 [INFO] Server started listening on :2345. + ``` + – it means that the DLE server started successfully and is waiting for you commands + + 1. Sign in to the [Postgres.ai Platform](https://console.postgres.ai/) and register your new DLE server: + 1. Go to `Database Lab > Instances` in the left menu + 1. Press the "Add instance" button + 1. `Project` – specify any name (this is how your DLE server will be named in the platform) + 1. 
`Verification token` – use the token generated above (`verification_token` value); do NOT press the "Generate" button here + 1. `URL` – use the value generated above // todo: not convenient, we need URL but reported was only hostname + 1. Press the "Verify URL" button to check the connectivity. Then press "Add". If everything is right, you should see the DLE page with green "OK" status: + + 1. Add Joe chatbot for efficient SQL optimization workflow: + 1. Go to the "SQL Optimization > Ask Joe" page using the left menu, click the "Add instance" button, specify the same project as you defined in the previous step + 1. `Signing secret` – use `platform_joe_signing_secret` from the Terraform output + 1. `URL` – use `public_dns_name` values from the Terraform output with port `444`; in our example, it's `https://demo-api-engine.aws.postgres.ai:444` + 1. Press "Verify URL" to check connectivity and then press "Add". You should see: + + + Now you can start using Joe chatbot for SQL execution plans troubleshooting and verification of optimization ideas. As a quick test, go to `SQL Optimization > Ask Joe` in the left menu, and enter `\dt+` command (a psql command to show the list of tables with sizes). You should see how Joe created a thin clone behind the scenes and immediately ran this psql command, presenting the result to you: + + +1. Set up [DB migration checker](https://postgres.ai/docs/db-migration-checker). Prepare a repository with your DB migrations(Flyway, Sqitch, Liquibase, etc.): + 1. Add secrets: + - `DLMC_CI_ENDPOINT` - an endpoint of your Database Lab Migration Checker service – use `vcs_db_migration_checker_registration_url` from the Terraform output + - `DLMC_VERIFICATION_TOKEN` - verification token for the Database Lab Migration Checker API – use `vcs_db_migration_checker_verification_token` from the Terraform output + 1. Configure a new workflow in the created repository (see an example of configuration: https://github.com/postgres-ai/green-zone/blob/master/.github/workflows/main.yml) + - add a custom action: https://github.com/marketplace/actions/database-lab-realistic-db-testing-in-ci + - provide input params for the action (the full list of available input params) + - provide environment variables: + - `DLMC_CI_ENDPOINT` - use a CI Checker endpoint from the repository secrets + - `DLMC_VERIFICATION_TOKEN` - use a verification token from the repository secrets + +1. Install and try the client CLI (`dblab`) + 1. Follow the [guide](https://postgres.ai/docs/how-to-guides/cli/cli-install-init) to install Database Lab CLI + 1. Initialize CLI: + ```shell + dblab init --environment-id= --url=https:// --token= + ``` + 1. Try it: + ```shell + dblab instance status + ``` + It should return the OK status: + ```json + { + "status": { + "code": "OK", + "message": "Instance is ready" + }, + ... + } + ``` + +This is it! + +If you need to remove everything created by this Terraform module, you can simply run `terraform destroy`. Do not forget to do it if you're just experimenting. Otherwise, if you leave infrastructure blocks running, they might significantly affect your AWS bill (depending on the EC2 instance family you've chosen, the disk type, and size). + +## Important Note +When the DLE creates new database clones, it makes them available on incremental ports in the 6000 range (e.g. 6000, 6001, ...). The DLE CLI will also report that the clone is available on a port in the 6000 range. However, please note that these are the ports when accessing the DLE from `localhost`. 
This Terraform module deploys [Envoy](https://www.envoyproxy.io/) to handle SSL termination and port forwarding to connect to DLE generated clones. Follow the [how-to guide](https://postgres.ai/docs/how-to-guides/administration/install-database-lab-with-terraform) to install Database Lab with Terraform on AWS From 32d6650c099f7b4c3592222b123022626623e213 Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Fri, 10 Dec 2021 07:22:34 +0000 Subject: [PATCH 13/25] Multiple terraform module enhancements --- README.md | 18 +++++++++--------- dle-logical-init.sh.tpl | 34 ++++++++++++++++++++++++---------- terraform.tfvars | 13 +++++++++---- variables.tf | 17 +++++++++++------ volumes.tf | 1 + 5 files changed, 54 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index 78da195..844fb66 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Database Lab Terraform Module -This [Terraform Module](https://www.terraform.io/docs/language/modules/index.html) is responsible for deploying the [Database Lab Engine](https://gitlab.com/postgres-ai/database-lab) to cloud hosting providers. +This [Terraform Module](https://www.terraform.io/docs/language/modules/index.html) can be used as a template for deploying the [Database Lab Engine](https://gitlab.com/postgres-ai/database-lab) to cloud hosting providers. Please feel free to tailor it to meet your requirements. Your source PostgreSQL database can be located anywhere, but DLE with other components will be created on an EC2 instance under your AWS account. Currently, only "logical" mode of data retrieval (dump/restore) is supported – the only available method for managed PostgreSQL cloud services such as RDS Postgres, RDS Aurora Postgres, Azure Postgres, or Heroku. "Physical" mode is not yet supported, but it will be in the future. More about various data retrieval options for DLE: https://postgres.ai/docs/how-to-guides/administration/data. @@ -12,7 +12,7 @@ Your source PostgreSQL database can be located anywhere, but DLE with other comp - [Terraform Installed](https://learn.hashicorp.com/tutorials/terraform/install-cli) (minimal version: 1.0.0) - AWS [Route 53](https://aws.amazon.com/route53/) Hosted Zone (For setting up TLS) for a domain or sub-domain you control - You must have AWS Access Keys and a default region in your Terraform environment (See section on required IAM Permissions) -- The DLE runs on an EC2 instance which can be accessed using a selected set of SSH keys uploaded to EC2. Use the Terraform parameter `aws_keypair` to specify which EC2 Keypair to use +- The DLE runs on an EC2 instance which can be accessed using a selected set of SSH keys uploaded to EC2. - Required IAM Permissions: to successfully run this Terraform module, the IAM User/Role must have the following permissions: * Read/Write permissions on EC2 * Read/Write permissions on Route53 @@ -49,16 +49,15 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth ``` 1. Edit `terraform.tfvars` file. 
In our example, we will use Heroku demo database as a source: ```config - dle_version_full = "2.4.1" + dle_version_full = "2.5.0" aws_ami_name = "DBLABserver*" - aws_keypair = "YOUR_AWS_KEYPAIR" aws_deploy_region = "us-east-1" aws_deploy_ebs_availability_zone = "us-east-1a" - aws_deploy_ec2_instance_type = "t2.large" + aws_deploy_ec2_instance_type = "c5.large" aws_deploy_ec2_instance_tag_name = "DBLABserver-ec2instance" - aws_deploy_ebs_size = "40" + aws_deploy_ebs_size = "10" aws_deploy_ebs_type = "gp2" aws_deploy_allow_ssh_from_cidrs = ["0.0.0.0/0"] aws_deploy_dns_api_subdomain = "tf-test" # subdomain in aws.postgres.ai, fqdn will be ${dns_api_subdomain}-engine.aws.postgres @@ -67,13 +66,14 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth source_postgres_host = "ec2-3-215-57-87.compute-1.amazonaws.com" source_postgres_port = "5432" source_postgres_dbname = "d3dljqkrnopdvg" # this is an existing DB (Heroku example DB) - source_postgres_username = "postgres" - + source_postgres_username = "bfxuriuhcfpftt" # in secret.tfvars, use: source_postgres_password = "dfe01cbd809a71efbaecafec5311a36b439460ace161627e5973e278dfe960b7" dle_debug_mode = "true" dle_retrieval_refresh_timetable = "0 0 * * 0" postgres_config_shared_preload_libraries = "pg_stat_statements,logerrors" # DB Migration Checker requires logerrors extension platform_project_name = "aws_test_tf" + + ssh_public_keys_files_list = ["~/.ssh/id_rsa.pub"] ``` 1. Create `secret.tfvars` containing `source_postgres_password`, `platform_access_token`, and `vcs_github_secret_token`. An example: ```config @@ -106,7 +106,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth public_dns_name = "demo-api-engine.aws.postgres.ai" # todo: this should be URL, not hostname – further we'll need URL, with protocol – `https://` ``` -1. To verify result and check the progress, you might want to connect to the just-created EC2 machine using IP address or hostname from the Terraform output. In our example, it can be done using this one-liner (you can find more about DLE logs and configuration on this page: https://postgres.ai/docs/how-to-guides/administration/engine-manage): +1. To verify result and check the progress, you might want to connect to the just-created EC2 machine using IP address or hostname from the Terraform output and ssh key from ssh_public_keys_files_list and/or ssh_public_keys_list variables. In our example, it can be done using this one-liner (you can find more about DLE logs and configuration on this page: https://postgres.ai/docs/how-to-guides/administration/engine-manage): ```shell echo "sudo docker logs dblab_server -f" | ssh ubuntu@18.118.126.25 -i postgres_ext_test.pem ``` diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index 957d18e..fcee494 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -86,17 +86,31 @@ EOF sudo systemctl enable envoy sudo systemctl start envoy -#create zfs pools -disks=(${dle_disks}) -for i in $${!disks[@]}; do +# create zfs pools +# Get the full list of disks available and then make attempts +# to create zpool on each. Here we assume that the system disk +# will be skipped because it already has a filesystem. +# This is a "brute force" approach that we probably want to +# rework, but for now we leave it as is because it seems that +# `/dev/../by-id` doesn't really work for all EC2 types. 
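+# A couple of implementation notes: `lsblk -ndp -e7 --output NAME` prints
+# whole-disk device paths only (-n: no header, -d: no partitions, -p: full
+# paths, -e7: exclude loop devices), and `zpool create -f` is expected to
+# fail on the system disk because its root filesystem is mounted, so the
+# pool index below advances only for the data disks.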
+ +disks=$(lsblk -ndp -e7 --output NAME) # TODO: this is not needed, used now for debug only + +i=1 + +sleep 10 # Not elegant at all, we need a better way to wait till the moment when all disks are available + +# Show all disks in alphabetic order; "-e7" to exclude loop devices +for disk in $disks; do sudo zpool create -f \ - -O compression=on \ - -O atime=off \ - -O recordsize=128k \ - -O logbias=throughput \ - -m /var/lib/dblab/dblab_pool_0$i\ - dblab_pool_0$i \ - $${disks[$i]} + -O compression=on \ + -O atime=off \ + -O recordsize=128k \ + -O logbias=throughput \ + -m /var/lib/dblab/dblab_pool_$(printf "%02d" $i)\ + dblab_pool_$(printf "%02d" $i) \ + $disk \ + && ((i=i+1)) # increment if succeeded done # Adjust DLE config diff --git a/terraform.tfvars b/terraform.tfvars index e703575..0f428e2 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -1,15 +1,15 @@ -dle_version = "2.5.0" +dle_version = "2.5.0" # it is also possible to use branch name here (e.g., "master") joe_version = "0.10.0" aws_ami_name = "DBLABserver*" -aws_keypair = "YOUR_AWS_KEYPAIR" aws_deploy_region = "us-east-1" aws_deploy_ebs_availability_zone = "us-east-1a" -aws_deploy_ec2_instance_type = "t2.large" +aws_deploy_ec2_instance_type = "c5.large" aws_deploy_ec2_instance_tag_name = "DBLABserver-ec2instance" -aws_deploy_ebs_size = "40" +aws_deploy_ebs_size = "10" aws_deploy_ebs_type = "gp2" +aws_deploy_ec2_volumes_names = ["/dev/xvdf", "/dev/xvdg",] aws_deploy_allow_ssh_from_cidrs = ["0.0.0.0/0"] aws_deploy_dns_api_subdomain = "tf-test" # subdomain in aws.postgres.ai, fqdn will be ${dns_api_subdomain}.aws.postgres.ai @@ -24,3 +24,8 @@ dle_retrieval_refresh_timetable = "0 0 * * 0" postgres_config_shared_preload_libraries = "pg_stat_statements,logerrors" # DB Migration Checker requires logerrors extension platform_project_name = "aws_test_tf" + +# Edit this list to have all public keys that will be placed to +# have them placed to authorized_keys. Instead of ssh_public_keys_files_list, +# it is possible to use ssh_public_keys_list containing public keys as text values. +ssh_public_keys_files_list = ["~/.ssh/id_rsa.pub"] diff --git a/variables.tf b/variables.tf index 62fb669..fc1a3cd 100644 --- a/variables.tf +++ b/variables.tf @@ -27,11 +27,6 @@ variable "aws_deploy_ec2_instance_type" { default = "t2.micro" } -variable "aws_keypair" { - description = "Key pair to access the EC2 instance" - default = "default" -} - variable "aws_deploy_allow_ssh_from_cidrs" { description = "List of CIDRs allowed to connect to SSH" default = ["0.0.0.0/0"] @@ -67,6 +62,11 @@ variable "aws_deploy_ebs_availability_zone" { default = "us-east-1a" } +variable "aws_deploy_ebs_encrypted" { + description = "If EBS volumes used by DLE are encrypted" + default = "true" +} + variable "aws_deploy_ebs_size" { description = "The size (GiB) for data volumes used by DLE" default = "1" @@ -77,12 +77,17 @@ variable "aws_deploy_ebs_type" { default = "gp2" } +# If we need to have more data disks, this array has to be extended. +# TODO: change logic – user sets the number of disks only, not thinking about names variable "aws_deploy_ec2_volumes_names" { description = "List of paths for EBS volumes mounts" + # This list is of "non-nitro" instances. For "nitro" ones, + # the real disk names will be different and in fact these names + # will be ignored. However, we still need to pass something here + # to proceed with the disk attachment. 
default = [ "/dev/xvdf", "/dev/xvdg", - "/dev/xvdh", ] } diff --git a/volumes.tf b/volumes.tf index 53968c4..86de7f0 100644 --- a/volumes.tf +++ b/volumes.tf @@ -9,6 +9,7 @@ resource "aws_volume_attachment" "ebs_att" { resource "aws_ebs_volume" "DLEVolume" { count = "${length(tolist(var.aws_deploy_ec2_volumes_names))}" availability_zone = "${var.aws_deploy_ebs_availability_zone}" + encrypted = "${var.aws_deploy_ebs_encrypted}" size = "${var.aws_deploy_ebs_size}" type = "${var.aws_deploy_ebs_type}" tags = { From 8cf2b502ad05ab48dc1f6298de11e44c73251121 Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Fri, 10 Dec 2021 14:41:46 +0000 Subject: [PATCH 14/25] EBS volumes count management improvement --- README.md | 8 ++++++- dle-logical-init.sh.tpl | 2 +- instance.tf | 1 - terraform.tfvars | 5 ++++- variables.tf | 47 ++++++++++++++++++++++++++++++----------- volumes.tf | 6 +++--- 6 files changed, 50 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index 844fb66..2946c15 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ Your source PostgreSQL database can be located anywhere, but DLE with other comp * Read/Write permissions on Cloudwatch ## How to use -- :construction: Currently, it is supposed that you run `terraform` commands on a Linux machine. MacOS and Windows support is not yet implemented (but planned). +- :construction: Currently, it is supposed that you run `terraform` commands on a Linux machine or MacOS. Windows support is not yet implemented (but planned). - It is recommended to clone this Git repository and adjust for your needs. Below we provide the detailed step-by-step instructions for quick start (see "Quick start") for a PoC setup - To configure parameters used by Terraform (and the Database Lab Engine itself), you will need to modify `terraform.tfvars` and create a file with secrets (`secret.tfvars`) - This Terraform module can be run independently or combined with any other standard Terraform module. You can learn more about using Terraform and the Terraform CLI [here](https://www.terraform.io/docs/cli/commands/index.html) @@ -58,6 +58,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth aws_deploy_ec2_instance_type = "c5.large" aws_deploy_ec2_instance_tag_name = "DBLABserver-ec2instance" aws_deploy_ebs_size = "10" + aws_deploy_ec2_volumes_count = "2" aws_deploy_ebs_type = "gp2" aws_deploy_allow_ssh_from_cidrs = ["0.0.0.0/0"] aws_deploy_dns_api_subdomain = "tf-test" # subdomain in aws.postgres.ai, fqdn will be ${dns_api_subdomain}-engine.aws.postgres @@ -73,7 +74,12 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth platform_project_name = "aws_test_tf" + # list of ssh public keys stored in files ssh_public_keys_files_list = ["~/.ssh/id_rsa.pub"] + # or provided inline + ssh_public_keys_list = [ + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDhbblazDXCFEc21DtFzprWC8DiqidnVRROzp6J6BeJR9+XydPUtl0Rt2mcNvxL5ro5bI9u5JRW8aDd6s+Orpr66hEDdwQTbT1wp5nyduFQcT3rR +aeDSilQvAHjr4/z/GZ6IgZ5MICSIh5hJJagHoxAVqeS9dCA27tv/n2T2XrxIUeBhywH1EmfwrnEw97tHM8F+yegayFDI1nVOUWUIxFMaygMygix8uKbQ2fl4rkkxG2oEx7uyAFMXHt4bewNbZuAp8b/b5ODL6tGHuHhcwfbWGriCO+l7UOf1K9maTx00o4wkzAPyd+qs70y/1iMX2YOOLYaYYdptEnFal2DVoD example@example.com" + ] ``` 1. Create `secret.tfvars` containing `source_postgres_password`, `platform_access_token`, and `vcs_github_secret_token`. 
An example: ```config diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index fcee494..281522a 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -181,7 +181,7 @@ case "${source_type}" in .retrieval.spec.logicalRestore.options.dumpLocation="${source_pgdump_s3_mount_point}/${source_pgdump_path_on_s3_bucket}" ' $dle_config_path/server.yml - nProcessors = $(getconf _NPROCESSORS_ONLN) + nProcessors=$(getconf _NPROCESSORS_ONLN) yq e -i ' .retrieval.spec.logicalDump.options.parallelJobs=${postgres_dump_parallel_jobs} | .retrieval.spec.logicalRestore.options.parallelJobs=$nProcessors diff --git a/instance.tf b/instance.tf index 5ff7bd1..c068078 100644 --- a/instance.tf +++ b/instance.tf @@ -49,7 +49,6 @@ resource "aws_instance" "aws_ec2" { dle_verification_token = "${random_string.dle_verification_token.result}" dle_debug_mode = "${var.dle_debug_mode}" dle_retrieval_refresh_timetable = "${var.dle_retrieval_refresh_timetable}" - dle_disks = "${join(" ",var.aws_deploy_ec2_volumes_names)}" dle_version = "${var.dle_version}" joe_version = "${var.joe_version}" aws_deploy_dns_zone_name = "${var.aws_deploy_dns_zone_name}" diff --git a/terraform.tfvars b/terraform.tfvars index 0f428e2..6103db8 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -9,7 +9,7 @@ aws_deploy_ec2_instance_type = "c5.large" aws_deploy_ec2_instance_tag_name = "DBLABserver-ec2instance" aws_deploy_ebs_size = "10" aws_deploy_ebs_type = "gp2" -aws_deploy_ec2_volumes_names = ["/dev/xvdf", "/dev/xvdg",] +aws_deploy_ec2_volumes_count = "2" aws_deploy_allow_ssh_from_cidrs = ["0.0.0.0/0"] aws_deploy_dns_api_subdomain = "tf-test" # subdomain in aws.postgres.ai, fqdn will be ${dns_api_subdomain}.aws.postgres.ai @@ -29,3 +29,6 @@ platform_project_name = "aws_test_tf" # have them placed to authorized_keys. Instead of ssh_public_keys_files_list, # it is possible to use ssh_public_keys_list containing public keys as text values. ssh_public_keys_files_list = ["~/.ssh/id_rsa.pub"] +ssh_public_keys_list = [ +"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDhbblazDXCFEc21DtFzprWC8DiqidnVRROzp6J6BeJR9+XydPUtl0Rt2mcNvxL5ro5bI9u5JRW8aDd6s+Orpr66hEDdwQTbT1wp5nyduFQcT3rR+aeDSilQvAHjr4/z/GZ6IgZ5MICSIh5hJJagHoxAVqeS9dCA27tv/n2T2XrxIUeBhywH1EmfwrnEw97tHM8F+yegayFDI1nVOUWUIxFMaygMygix8uKbQ2fl4rkkxG2oEx7uyAFMXHt4bewNbZuAp8b/b5ODL6tGHuHhcwfbWGriCO+l7UOf1K9maTx00o4wkzAPyd+qs70y/1iMX2YOOLYaYYdptEnFal2DVoD example@example.com" +] diff --git a/variables.tf b/variables.tf index fc1a3cd..0237948 100644 --- a/variables.tf +++ b/variables.tf @@ -77,18 +77,9 @@ variable "aws_deploy_ebs_type" { default = "gp2" } -# If we need to have more data disks, this array has to be extended. -# TODO: change logic – user sets the number of disks only, not thinking about names -variable "aws_deploy_ec2_volumes_names" { - description = "List of paths for EBS volumes mounts" - # This list is of "non-nitro" instances. For "nitro" ones, - # the real disk names will be different and in fact these names - # will be ignored. However, we still need to pass something here - # to proceed with the disk attachment. 
- default = [ - "/dev/xvdf", - "/dev/xvdg", - ] +variable "aws_deploy_ec2_volumes_count" { + description = "Number (from 1 to 22) of EBS volumes attached to EC2 to create ZFS pools" + default = "2" } variable "source_postgres_dbname" { @@ -192,3 +183,35 @@ variable "ssh_public_keys_list"{ description = "List of ssh public keys to copy to the provisioned instance with DLE" default = [] } + +variable "aws_deploy_ec2_ebs_volumes_names" { + description = "List of paths for EBS volumes mounts" + # This list is of "non-nitro" instances. For "nitro" ones, + # the real disk names will be different and in fact these names + # will be ignored. However, we still need to pass something here + # to proceed with the disk attachment. + default = [ + "/dev/xvde", + "/dev/xvdf", + "/dev/xvdg", + "/dev/xvdh", + "/dev/xvdi", + "/dev/xvdj", + "/dev/xvdk", + "/dev/xvdl", + "/dev/xvdm", + "/dev/xvdn", + "/dev/xvdo", + "/dev/xvdp", + "/dev/xvdq", + "/dev/xvdr", + "/dev/xvds", + "/dev/xvdt", + "/dev/xvdu", + "/dev/xvdv", + "/dev/xvdw", + "/dev/xvdx", + "/dev/xvdy", + "/dev/xvdz", + ] +} diff --git a/volumes.tf b/volumes.tf index 86de7f0..2f70818 100644 --- a/volumes.tf +++ b/volumes.tf @@ -1,13 +1,13 @@ resource "aws_volume_attachment" "ebs_att" { - count = "${length(tolist(var.aws_deploy_ec2_volumes_names))}" - device_name = "${element(var.aws_deploy_ec2_volumes_names, count.index)}" + count = "${var.aws_deploy_ec2_volumes_count}" + device_name = "${element(var.aws_deploy_ec2_ebs_volumes_names, count.index)}" volume_id = "${element(aws_ebs_volume.DLEVolume.*.id, count.index)}" instance_id = "${aws_instance.aws_ec2.id}" force_detach = true } resource "aws_ebs_volume" "DLEVolume" { - count = "${length(tolist(var.aws_deploy_ec2_volumes_names))}" + count = "${var.aws_deploy_ec2_volumes_count}" availability_zone = "${var.aws_deploy_ebs_availability_zone}" encrypted = "${var.aws_deploy_ebs_encrypted}" size = "${var.aws_deploy_ebs_size}" From e00072bae9d3fa70f8306d3e6e42d83daae75626 Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Wed, 15 Dec 2021 18:57:55 +0000 Subject: [PATCH 15/25] improve TF module output --- README.md | 27 +++++++++++++++++++-------- dle-logical-init.sh.tpl | 17 ++++++++++++----- outputs.tf | 27 +++++++++++++++++++++++++++ 3 files changed, 58 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 2946c15..1dd2c44 100644 --- a/README.md +++ b/README.md @@ -103,14 +103,25 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth ``` 1. If everything goes well, you should get an output like this: ```config - vcs_db_migration_checker_verification_token = "gsio7KmgaxECfJ80kUx2tUeIf4kEXZex" - dle_verification_token = "zXPodd13LyQaKgVXGmSCeB8TUtnGNnIa" - ec2_public_dns = "ec2-18-118-126-25.us-east-2.compute.amazonaws.com" - ec2instance = "i-0b07738148950af25" - ip = "18.118.126.25" - platform_joe_signing_secret = "lG23qZbUh2kq0ULIBfW6TRwKzqGZu1aP" - public_dns_name = "demo-api-engine.aws.postgres.ai" # todo: this should be URL, not hostname – further we'll need URL, with protocol – `https://` - ``` + + ##################################################################### + + Congratulations! Database Lab Engine installed. + Data initialization may take time, depending on the database size. 
+ + You should be able to work with all DLE interfaces already: + - [RECOMMENDED] UI: https://tf-test.aws.postgres.ai:446 + - CLI: dblab init --url=https://tf-test.aws.postgres.ai --token=sDTPu17pzXhW9DkhcSGpAMj72KgiIJxG --environment="i-0687b060f45314be5" --insecure + - API: https://tf-test.aws.postgres.ai + - SSH connection for troubleshooting: ssh ubuntu@3.92.133.178 -i dmitry-DBLABserver-ec2instance.pem + + (Use verification token: sDTPu17pzXhW9DkhcSGpAMj72KgiIJxG + + For support, go to https://postgres.ai/contact. + + ##################################################################### + + ``` 1. To verify result and check the progress, you might want to connect to the just-created EC2 machine using IP address or hostname from the Terraform output and ssh key from ssh_public_keys_files_list and/or ssh_public_keys_list variables. In our example, it can be done using this one-liner (you can find more about DLE logs and configuration on this page: https://postgres.ai/docs/how-to-guides/administration/engine-manage): ```shell diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index 281522a..0095e39 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -190,6 +190,9 @@ case "${source_type}" in esac +# Fix ownership of the dblab directory +chown -R ubuntu.ubuntu /home/ubuntu/.dblab/ + sudo docker run \ --name dblab_server \ --label dblab_control \ @@ -214,11 +217,14 @@ done curl https://gitlab.com/postgres-ai/database-lab/-/raw/${dle_version}/scripts/cli_install.sh | bash sudo mv ~/.dblab/dblab /usr/local/bin/dblab -dblab init \ - --environment-id=tutorial \ - --url=http://localhost:2345 \ - --token=${dle_verification_token} \ - --insecure + +# Init dblab environment +su - ubuntu -c \ + 'dblab init \ + --environment-id=tutorial \ + --url=http://localhost:2345 \ + --token=${dle_verification_token} \ + --insecure' # Configure and run Joe Bot container. 
joe_config_path="/home/ubuntu/.dblab/joe/configs" @@ -276,3 +282,4 @@ sudo docker run \ --volume /tmp/ci_checker:/tmp/ci_checker \ --volume $ci_checker_config_path:/home/dblab/configs:ro \ registry.gitlab.com/postgres-ai/database-lab/dblab-ci-checker:${dle_version} + diff --git a/outputs.tf b/outputs.tf index 7ebc6fa..7b86ce1 100644 --- a/outputs.tf +++ b/outputs.tf @@ -28,3 +28,30 @@ output "vcs_db_migration_checker_registration_url" { output "local_ui_url" { value = "${format("%s://%s:%s", "https",join("", aws_route53_record.dblab_subdomain.*.fqdn),"446")}" } + +locals { + welcome_message = < Date: Wed, 15 Dec 2021 19:00:10 +0000 Subject: [PATCH 16/25] Temporary default DLE version: v3.0.0-rc.2 --- terraform.tfvars | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/terraform.tfvars b/terraform.tfvars index 6103db8..eb1084f 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -1,4 +1,4 @@ -dle_version = "2.5.0" # it is also possible to use branch name here (e.g., "master") +dle_version = "v3.0.0-rc.2" # it is also possible to use branch name here (e.g., "master") joe_version = "0.10.0" aws_ami_name = "DBLABserver*" From 607214fb40d51341665bf82a11cbcde044551412 Mon Sep 17 00:00:00 2001 From: Nikolay Samokhvalov Date: Thu, 16 Dec 2021 08:26:06 +0000 Subject: [PATCH 17/25] Add missing parenthesis ")" --- README.md | 2 +- outputs.tf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 1dd2c44..7d2ea68 100644 --- a/README.md +++ b/README.md @@ -115,7 +115,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth - API: https://tf-test.aws.postgres.ai - SSH connection for troubleshooting: ssh ubuntu@3.92.133.178 -i dmitry-DBLABserver-ec2instance.pem - (Use verification token: sDTPu17pzXhW9DkhcSGpAMj72KgiIJxG + (Use verification token: sDTPu17pzXhW9DkhcSGpAMj72KgiIJxG) For support, go to https://postgres.ai/contact. diff --git a/outputs.tf b/outputs.tf index 7b86ce1..e7fbe4b 100644 --- a/outputs.tf +++ b/outputs.tf @@ -44,7 +44,7 @@ locals { - API: ${format("%s://%s", "https",join("", aws_route53_record.dblab_subdomain.*.fqdn))} - SSH connection for troubleshooting: ssh ubuntu@${aws_instance.aws_ec2.public_ip} -i ${var.aws_deploy_ec2_instance_tag_name}.pem - (Use verification token: ${random_string.dle_verification_token.result} + (Use verification token: ${random_string.dle_verification_token.result}) For support, go to https://postgres.ai/contact. 
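The welcome message introduced in patches 15–17 (and renamed to `next_steps` later in the series) follows a common Terraform pattern: compose the text in a `locals` heredoc that interpolates other resources, then expose it through an output. A condensed sketch of that pattern, reusing the resource names visible in the diffs above; the message body is abbreviated, and the `sensitive` flag only appears once the output is renamed:

```hcl
locals {
  welcome_message = <<-EOT
    Congratulations! Database Lab Engine installed.

    - UI:  https://${join("", aws_route53_record.dblab_subdomain.*.fqdn)}:446
    - SSH: ssh ubuntu@${aws_instance.aws_ec2.public_ip} -i ${var.aws_deploy_ec2_instance_tag_name}.pem

    (Use verification token: ${random_string.dle_verification_token.result})
  EOT
}

output "next_steps" {
  value     = local.welcome_message
  sensitive = true # keeps the verification token out of regular `terraform apply` logs
}
```

Marking the output `sensitive` is why the deploy command later becomes `terraform apply -var-file="secret.tfvars" -auto-approve && terraform output -raw next_steps`: the message has to be requested explicitly to be printed in clear text.
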
From b413585eb27bc1092a3d3aaf7c9ca475922dc19e Mon Sep 17 00:00:00 2001 From: Anatoly Stansler Date: Thu, 16 Dec 2021 16:34:42 +0000 Subject: [PATCH 18/25] fix: localUI -> embeddedUI --- outputs.tf | 2 +- security.tf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/outputs.tf b/outputs.tf index 7b86ce1..2b5f61c 100644 --- a/outputs.tf +++ b/outputs.tf @@ -25,7 +25,7 @@ output "vcs_db_migration_checker_verification_token" { output "vcs_db_migration_checker_registration_url" { value = "${format("%s://%s:%s", "https",join("", aws_route53_record.dblab_subdomain.*.fqdn),"445")}" } -output "local_ui_url" { +output "embedded_ui_url" { value = "${format("%s://%s:%s", "https",join("", aws_route53_record.dblab_subdomain.*.fqdn),"446")}" } diff --git a/security.tf b/security.tf index c3dd56c..7dcedf1 100644 --- a/security.tf +++ b/security.tf @@ -17,7 +17,7 @@ resource "aws_security_group_rule" "dle_products" { # 443 - dle_instance_api # 444 - joe_bot_api # 445 - ci_observer_api - # 446 - dle_local_ui + # 446 - dle_embedded_ui security_group_id = aws_security_group.dle_instance_sg.id type = "ingress" from_port = 443 From 6f95e41fe9fb4979d61ab65da647fc031e65382f Mon Sep 17 00:00:00 2001 From: Dmitry Fomin Date: Mon, 20 Dec 2021 13:14:11 +0000 Subject: [PATCH 19/25] switch to S3 as a source for DLE in example, change welcome message output and some cleanup --- README.md | 4 ++-- instance.tf | 4 ---- outputs.tf | 3 ++- terraform.tfvars | 8 ++++---- variables.tf | 2 +- 5 files changed, 9 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 7d2ea68..6bfd419 100644 --- a/README.md +++ b/README.md @@ -99,7 +99,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth ``` 1. Deploy: ``` - terraform apply -var-file="secret.tfvars" -auto-approve + terraform apply -var-file="secret.tfvars" -auto-approve && terraform output -raw next_steps ``` 1. If everything goes well, you should get an output like this: ```config @@ -186,7 +186,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth This is it! -If you need to remove everything created by this Terraform module, you can simply run `terraform destroy`. Do not forget to do it if you're just experimenting. Otherwise, if you leave infrastructure blocks running, they might significantly affect your AWS bill (depending on the EC2 instance family you've chosen, the disk type, and size). +If you need to remove everything created by this Terraform module, you can run `terraform destroy -var-file="secret.tfvars" -auto-approve`. Do not forget to do it if you're just experimenting. Otherwise, if you leave infrastructure blocks running, they might significantly affect your AWS bill (depending on the EC2 instance family you've chosen, the disk type, and size). ## Important Note When the DLE creates new database clones, it makes them available on incremental ports in the 6000 range (e.g. 6000, 6001, ...). The DLE CLI will also report that the clone is available on a port in the 6000 range. However, please note that these are the ports when accessing the DLE from `localhost`. This Terraform module deploys [Envoy](https://www.envoyproxy.io/) to handle SSL termination and port forwarding to connect to DLE generated clones. 
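For context on the `terraform.tfvars` diff that follows: this patch switches the example from dumping the live Heroku demo database to restoring a pre-made dump stored on S3. Condensed, the S3-sourced configuration amounts to the following (values are the demo ones used in this series; when `source_type` is `"s3"`, the module also switches on the conditional IAM instance profile shown earlier in `instance.tf`, intended to give the instance access to the dump bucket):

```config
source_postgres_version         = "13"
source_type                     = "s3"              # restore from a dump on S3 instead of dumping a live database
source_pgdump_s3_bucket         = "tf-demo-dump"    # example public bucket
source_pgdump_path_on_s3_bucket = "heroku_sql.sql"  # example dump of the demo database
```

The direct-connection variables (`source_postgres_host`, `source_postgres_port`, `source_postgres_dbname`, `source_postgres_username`) are removed from the example accordingly.
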
diff --git a/instance.tf b/instance.tf index c068078..1dbd63b 100644 --- a/instance.tf +++ b/instance.tf @@ -1,7 +1,3 @@ -locals { - public_key = file("~/.ssh/id_rsa.pub") - } - resource "tls_private_key" "ssh_key" { algorithm = "RSA" rsa_bits = 4096 diff --git a/outputs.tf b/outputs.tf index 05361ae..559497b 100644 --- a/outputs.tf +++ b/outputs.tf @@ -52,6 +52,7 @@ locals { EOT } -output "zzz_next_steps" { +output "next_steps" { value = local.welcome_message + sensitive = true } diff --git a/terraform.tfvars b/terraform.tfvars index eb1084f..fec09da 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -14,10 +14,6 @@ aws_deploy_allow_ssh_from_cidrs = ["0.0.0.0/0"] aws_deploy_dns_api_subdomain = "tf-test" # subdomain in aws.postgres.ai, fqdn will be ${dns_api_subdomain}.aws.postgres.ai source_postgres_version = "13" -source_postgres_host = "ec2-3-215-57-87.compute-1.amazonaws.com" -source_postgres_port = "5432" -source_postgres_dbname = "d3dljqkrnopdvg" # this is an existing DB (Heroku example DB) -source_postgres_username = "bfxuriuhcfpftt" # in secret.tfvars, use: source_postgres_password = "dfe01cbd809a71efbaecafec5311a36b439460ace161627e5973e278dfe960b7" dle_debug_mode = "true" dle_retrieval_refresh_timetable = "0 0 * * 0" @@ -25,6 +21,10 @@ postgres_config_shared_preload_libraries = "pg_stat_statements,logerrors" # DB M platform_project_name = "aws_test_tf" +source_type = "s3" # source is dump stored on demo s3 bucket +source_pgdump_s3_bucket = "tf-demo-dump" # This is an example public bucket +source_pgdump_path_on_s3_bucket = "heroku_sql.sql" # This is an example dump from demo database + # Edit this list to have all public keys that will be placed to # have them placed to authorized_keys. Instead of ssh_public_keys_files_list, # it is possible to use ssh_public_keys_list containing public keys as text values. diff --git a/variables.tf b/variables.tf index 0237948..4addf32 100644 --- a/variables.tf +++ b/variables.tf @@ -99,7 +99,7 @@ variable "source_postgres_port" { variable "source_postgres_username" { description = "Source database username" - sensitive = true + default = "postgres" } variable "source_postgres_password" { From 6f38aff14462baa4f34e9cfb9f53fdccc6657a6f Mon Sep 17 00:00:00 2001 From: Nikolay Samokhvalov Date: Tue, 21 Dec 2021 19:42:06 +0000 Subject: [PATCH 20/25] Switch to v3.0.0 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6bfd419..858192a 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth ``` 1. Edit `terraform.tfvars` file. 
In our example, we will use Heroku demo database as a source: ```config - dle_version_full = "2.5.0" + dle_version_full = "v3.0.0" aws_ami_name = "DBLABserver*" From 1eb902a290940bc5254d535b3093c5ac991fe912 Mon Sep 17 00:00:00 2001 From: Nikolay Samokhvalov Date: Tue, 21 Dec 2021 19:43:48 +0000 Subject: [PATCH 21/25] Update terraform.tfvars --- terraform.tfvars | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/terraform.tfvars b/terraform.tfvars index fec09da..6199afb 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -1,4 +1,4 @@ -dle_version = "v3.0.0-rc.2" # it is also possible to use branch name here (e.g., "master") +dle_version = "v3.0.0" # it is also possible to use branch name here (e.g., "master") joe_version = "0.10.0" aws_ami_name = "DBLABserver*" From 626fb8deae887f50ed35e18da08041cdfcca7951 Mon Sep 17 00:00:00 2001 From: Nikolay Samokhvalov Date: Tue, 21 Dec 2021 20:02:41 +0000 Subject: [PATCH 22/25] remove v --- README.md | 2 +- terraform.tfvars | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 858192a..b608d0e 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth ``` 1. Edit `terraform.tfvars` file. In our example, we will use Heroku demo database as a source: ```config - dle_version_full = "v3.0.0" + dle_version_full = "3.0.0" aws_ami_name = "DBLABserver*" diff --git a/terraform.tfvars b/terraform.tfvars index 6199afb..97c62cf 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -1,4 +1,4 @@ -dle_version = "v3.0.0" # it is also possible to use branch name here (e.g., "master") +dle_version = "3.0.0" # it is also possible to use branch name here (e.g., "master") joe_version = "0.10.0" aws_ami_name = "DBLABserver*" From f7c7acf98a7b30a1f4f459413571dd9119dabe50 Mon Sep 17 00:00:00 2001 From: Artyom Kartasov Date: Mon, 27 Dec 2021 01:47:59 +0000 Subject: [PATCH 23/25] fix: adjust image version --- dle-logical-init.sh.tpl | 6 ++++-- terraform.tfvars | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/dle-logical-init.sh.tpl b/dle-logical-init.sh.tpl index 0095e39..8306166 100644 --- a/dle-logical-init.sh.tpl +++ b/dle-logical-init.sh.tpl @@ -193,6 +193,8 @@ esac # Fix ownership of the dblab directory chown -R ubuntu.ubuntu /home/ubuntu/.dblab/ +image_version=$(echo ${dle_version} | sed 's/v*//') + sudo docker run \ --name dblab_server \ --label dblab_control \ @@ -207,7 +209,7 @@ sudo docker run \ --env DOCKER_API_VERSION=1.39 \ --detach \ --restart on-failure \ -registry.gitlab.com/postgres-ai/database-lab/dblab-server:${dle_version} +registry.gitlab.com/postgres-ai/database-lab/dblab-server:$image_version ### Waiting for the Database Lab Engine initialization. 
for i in {1..30000}; do @@ -281,5 +283,5 @@ sudo docker run \ --volume /var/run/docker.sock:/var/run/docker.sock \ --volume /tmp/ci_checker:/tmp/ci_checker \ --volume $ci_checker_config_path:/home/dblab/configs:ro \ -registry.gitlab.com/postgres-ai/database-lab/dblab-ci-checker:${dle_version} +registry.gitlab.com/postgres-ai/database-lab/dblab-ci-checker:$image_version diff --git a/terraform.tfvars b/terraform.tfvars index 97c62cf..6199afb 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -1,4 +1,4 @@ -dle_version = "3.0.0" # it is also possible to use branch name here (e.g., "master") +dle_version = "v3.0.0" # it is also possible to use branch name here (e.g., "master") joe_version = "0.10.0" aws_ami_name = "DBLABserver*" From 68062ba5d881d88e4f3334d523b646a1b7c71481 Mon Sep 17 00:00:00 2001 From: Artyom Kartasov Date: Wed, 9 Feb 2022 20:38:54 +0000 Subject: [PATCH 24/25] Apply 1 suggestion(s) to 1 file(s) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b608d0e..92d8d53 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth ``` 1. Edit `terraform.tfvars` file. In our example, we will use Heroku demo database as a source: ```config - dle_version_full = "3.0.0" + dle_version = "v3.0.0" aws_ami_name = "DBLABserver*" From 3f655ddb47b1be31b83e832bf90ca346cb612d61 Mon Sep 17 00:00:00 2001 From: Nikolay Samokhvalov Date: Wed, 9 Feb 2022 20:39:32 +0000 Subject: [PATCH 25/25] 3.0.1 --- README.md | 2 +- terraform.tfvars | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 92d8d53..073cbd0 100644 --- a/README.md +++ b/README.md @@ -49,7 +49,7 @@ The following steps were tested on Ubuntu 20.04 but supposed to be valid for oth ``` 1. Edit `terraform.tfvars` file. In our example, we will use Heroku demo database as a source: ```config - dle_version = "v3.0.0" + dle_version = "v3.0.1" aws_ami_name = "DBLABserver*" diff --git a/terraform.tfvars b/terraform.tfvars index 6199afb..2a93afa 100644 --- a/terraform.tfvars +++ b/terraform.tfvars @@ -1,4 +1,4 @@ -dle_version = "v3.0.0" # it is also possible to use branch name here (e.g., "master") +dle_version = "v3.0.1" # it is also possible to use branch name here (e.g., "master") joe_version = "0.10.0" aws_ami_name = "DBLABserver*"
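
The closing patches settle on passing the version with a leading "v" (`dle_version = "v3.0.1"`) and stripping that prefix inside the init script with `sed` before building the Docker image tag. A hypothetical alternative (not part of this series) would be to normalize the value on the Terraform side when rendering the template, using the built-in `trimprefix` function; the `image_tag` template variable below is illustrative only:

```hcl
# Sketch of the user_data argument inside the aws_instance resource:
user_data = templatefile("${path.module}/dle-logical-init.sh.tpl", {
  # ...other template variables as in instance.tf...
  dle_version = var.dle_version                  # e.g. "v3.0.1", used for raw GitLab URLs (git tag name)
  image_tag   = trimprefix(var.dle_version, "v") # e.g. "3.0.1", used for Docker image tags
})
```

With this variant the template would interpolate `${image_tag}` for the `dblab-server` and `dblab-ci-checker` images instead of computing `image_version` with `sed`.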