diff --git a/packages/data-api/scripts/buildAnalyticsMaterializedView.sh b/packages/data-api/scripts/buildAnalyticsMaterializedView.sh index 05a4638d62..16c1ad4d6a 100755 --- a/packages/data-api/scripts/buildAnalyticsMaterializedView.sh +++ b/packages/data-api/scripts/buildAnalyticsMaterializedView.sh @@ -1,16 +1,17 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. "$DIR/../../scripts/bash/mergeEnvForDB.sh" # Set default port in case it wasn't in .env : "${DB_PORT:=5432}" cd scripts export PGPASSWORD=$DB_PASSWORD -if [[ "$1" == "--force" || "$1" == "-f" ]]; then - psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT build_analytics_table(true);" +if [[ $1 = '--force' || $1 = '-f' ]]; then + psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT build_analytics_table(true);" else - psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT build_analytics_table();" + psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT build_analytics_table();" fi -psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT create_analytics_table_indexes();" +psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT create_analytics_table_indexes();" diff --git a/packages/data-api/scripts/dropAnalyticsMaterializedView.sh b/packages/data-api/scripts/dropAnalyticsMaterializedView.sh index c961a67eaa..c47ac0ca89 100755 --- a/packages/data-api/scripts/dropAnalyticsMaterializedView.sh +++ b/packages/data-api/scripts/dropAnalyticsMaterializedView.sh @@ -1,12 +1,13 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. 
"$DIR/../../scripts/bash/mergeEnvForDB.sh" # Set default port in case it wasn't in .env : "${DB_PORT:=5432}" cd scripts export PGPASSWORD=$DB_PASSWORD -psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT drop_analytics_table();" -psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT drop_analytics_log_tables();" +psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT drop_analytics_table();" +psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT drop_analytics_log_tables();" diff --git a/packages/data-api/scripts/fastRefreshAnalyticsTable.sh b/packages/data-api/scripts/fastRefreshAnalyticsTable.sh index 5df9fd9863..6d74943671 100755 --- a/packages/data-api/scripts/fastRefreshAnalyticsTable.sh +++ b/packages/data-api/scripts/fastRefreshAnalyticsTable.sh @@ -1,12 +1,14 @@ -#!/bin/bash -e -echo "Fast refreshing analytics table" +#!/usr/bin/env bash +set -e + +echo 'Fast refreshing analytics table' DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. "$DIR/../../scripts/bash/mergeEnvForDB.sh" # Set default port in case it wasn't in .env : "${DB_PORT:=5432}" cd scripts export PGPASSWORD=$DB_PASSWORD -psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT mv\$refreshMaterializedView('analytics', 'public', true);" +psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT mv\$refreshMaterializedView('analytics', 'public', true);" diff --git a/packages/data-api/scripts/fullRefreshAnalyticsTable.sh b/packages/data-api/scripts/fullRefreshAnalyticsTable.sh index 65da86c341..159101fd5d 100755 --- a/packages/data-api/scripts/fullRefreshAnalyticsTable.sh +++ b/packages/data-api/scripts/fullRefreshAnalyticsTable.sh @@ -1,14 +1,16 @@ -#!/bin/bash -e -echo "Fully refreshing analytics table" +#!/usr/bin/env bash +set -e + +echo 'Fully refreshing analytics table' DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. 
"$DIR/../../scripts/bash/mergeEnvForDB.sh" # Set default port in case it wasn't in .env : "${DB_PORT:=5432}" cd scripts export PGPASSWORD=$DB_PASSWORD -psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT drop_analytics_table_indexes();" -psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT mv\$refreshMaterializedView('analytics', 'public');" -psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_USER -tc "SELECT create_analytics_table_indexes();" +psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT drop_analytics_table_indexes();" +psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT mv\$refreshMaterializedView('analytics', 'public');" +psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -tc "SELECT create_analytics_table_indexes();" diff --git a/packages/data-api/scripts/installMvRefreshModule.sh b/packages/data-api/scripts/installMvRefreshModule.sh index 087baef3de..07e34e761e 100755 --- a/packages/data-api/scripts/installMvRefreshModule.sh +++ b/packages/data-api/scripts/installMvRefreshModule.sh @@ -1,6 +1,4 @@ #!/bin/bash -DIR=$(dirname "$0") - DIR=$(pwd "$0") source "$DIR/../../scripts/bash/mergeEnvForDB.sh" @@ -9,10 +7,10 @@ source "$DIR/../../scripts/bash/mergeEnvForDB.sh" : "${DB_PORT:=5432}" export PGPASSWORD=$DB_PG_PASSWORD -MV_REFRESH_EXISTS=`psql -p $DB_PORT -X -A -h $DB_URL -d $DB_NAME -U $DB_PG_USER -t -c "SELECT schema_name FROM information_schema.schemata WHERE schema_name = '$DB_MV_USER'"` +MV_REFRESH_EXISTS=$(psql -p "$DB_PORT" -X -A -h "$DB_URL" -d "$DB_NAME" -U "$DB_PG_USER" -t -c "SELECT schema_name FROM information_schema.schemata WHERE schema_name = '$DB_MV_USER'") -if [ "$MV_REFRESH_EXISTS" == "$DB_MV_USER" ]; then - echo "Fast Refresh module already exists, skipping installation" +if [[ $MV_REFRESH_EXISTS = "$DB_MV_USER" ]]; then + echo 'Fast Refresh module already exists, skipping installation' else git submodule update --init scripts/pg-mv-fast-refresh cd 
scripts/pg-mv-fast-refresh/ @@ -23,4 +21,4 @@ else fi export PGPASSWORD=$DB_PG_PASSWORD -psql -p $DB_PORT --set=db_user="$DB_USER" --set=mv_user="$DB_MV_USER" --set=db_name="$DB_NAME" -h $DB_URL -d $DB_NAME -U $DB_PG_USER -f scripts/grantMvRefreshPermissions.sql +psql -p "$DB_PORT" --set=db_user="$DB_USER" --set=mv_user="$DB_MV_USER" --set=db_name="$DB_NAME" -h "$DB_URL" -d "$DB_NAME" -U "$DB_PG_USER" -f scripts/grantMvRefreshPermissions.sql diff --git a/packages/data-api/scripts/patchMvRefreshModule.sh b/packages/data-api/scripts/patchMvRefreshModule.sh index d38e9d73c9..86b936711e 100755 --- a/packages/data-api/scripts/patchMvRefreshModule.sh +++ b/packages/data-api/scripts/patchMvRefreshModule.sh @@ -1,42 +1,43 @@ -#!/bin/bash +#!/usr/bin/env bash DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. "$DIR/../../scripts/bash/ansiControlSequences.sh" COMMAND=$1 -if [[ "$COMMAND" == "" ]]; then - echo "Error: missing patch command! Must be one of: up, down, create" +if [[ $COMMAND = '' ]]; then + echo "${RED}Error: missing patch command! 
Must be one of: up, down, create${RESET}" exit 1 fi -if [[ "$COMMAND" == "create" ]]; then - echo "Enter patch name: " +if [[ $COMMAND = create ]]; then + echo 'Enter patch name: ' read PATCH_NAME fi VERSION=$2 -if [[ "$VERSION" == "" ]]; then - echo "Version unspecified, defaulting to database mvrefresh version" +if [[ $VERSION = '' ]]; then + echo 'Version unspecified, defaulting to database mvrefresh version' # Set default port in case it wasn't in .env : "${DB_PORT:=5432}" export PGPASSWORD=$DB_PASSWORD VERSION_SQL_FUNC="SELECT mv\$version()" - VERSION=`psql -p $DB_PORT -X -A -h $DB_URL -d $DB_NAME -U $DB_USER -t -c "$VERSION_SQL_FUNC"` + VERSION=$(psql -p "$DB_PORT" -X -A -h "$DB_URL" -d "$DB_NAME" -U "$DB_USER" -t -c "$VERSION_SQL_FUNC") - if [[ "$VERSION" == "" ]]; then - echo "Error: failed to detect mvrefresh version from database" + if [[ $VERSION = '' ]]; then + echo "${RED}Error: failed to detect mvrefresh version from database${RESET}" exit 1 fi - echo "Using version: $VERSION" + echo "Using version: ${BOLD}${GREEN}$VERSION${RESET}" fi -if [[ ! -d "./scripts/patches/$VERSION" && ! "$COMMAND" == "create" ]]; then +if [[ ! 
-d ./scripts/patches/$VERSION && $COMMAND != create ]]; then echo "No patches exist for version: $VERSION, skipping" else - ts-node ./scripts/patchMvRefresh.ts $COMMAND:$VERSION $PATCH_NAME --migrations-dir "./scripts/patches" --table "patches" -v --config-file "../../babel.config.json" + ts-node ./scripts/patchMvRefresh.ts $COMMAND:$VERSION $PATCH_NAME --migrations-dir './scripts/patches' --table 'patches' -v --config-file '../../babel.config.json' fi diff --git a/packages/data-api/scripts/refreshAnalyticsTable.sh b/packages/data-api/scripts/refreshAnalyticsTable.sh index 52e1a6aa2a..8810ca311f 100755 --- a/packages/data-api/scripts/refreshAnalyticsTable.sh +++ b/packages/data-api/scripts/refreshAnalyticsTable.sh @@ -1,6 +1,7 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e -if [[ "$1" == "--full" || "$1" == "-f" ]]; then +if [[ $1 = '--full' || $1 = '-f' ]]; then ./scripts/fullRefreshAnalyticsTable.sh else ./scripts/fastRefreshAnalyticsTable.sh diff --git a/packages/data-api/scripts/uninstallMvRefreshModule.sh b/packages/data-api/scripts/uninstallMvRefreshModule.sh index 9880e0ab60..c21ffe8b55 100755 --- a/packages/data-api/scripts/uninstallMvRefreshModule.sh +++ b/packages/data-api/scripts/uninstallMvRefreshModule.sh @@ -1,16 +1,17 @@ -#!/bin/bash +#!/usr/bin/env bash +set -e DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. 
"$DIR/../../scripts/bash/mergeEnvForDB.sh" # Set default port in case it wasn't in .env : "${DB_PORT:=5432}" export PGPASSWORD=$DB_PG_PASSWORD -MV_REFRESH_EXISTS=`psql -X -A -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_PG_USER -t -c "SELECT schema_name FROM information_schema.schemata WHERE schema_name = '$DB_MV_USER'"` +MV_REFRESH_EXISTS=$(psql -X -A -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_PG_USER" -t -c "SELECT schema_name FROM information_schema.schemata WHERE schema_name = '$DB_MV_USER'") -if [ "$MV_REFRESH_EXISTS" != "$DB_MV_USER" ]; then - echo "Fast Refresh module does not exist, skipping uninstallation" +if [[ $MV_REFRESH_EXISTS != "$DB_MV_USER" ]]; then + echo 'Fast Refresh module does not exist, skipping uninstallation' else git submodule update --init scripts/pg-mv-fast-refresh cd scripts/pg-mv-fast-refresh/ @@ -19,5 +20,5 @@ else cd ../.. git submodule deinit scripts/pg-mv-fast-refresh export PGPASSWORD=$DB_PG_PASSWORD - psql -p $DB_PORT -h $DB_URL -d $DB_NAME -U $DB_PG_USER -tc "REVOKE ALL PRIVILEGES on schema public FROM $DB_MV_USER; DROP ROLE IF EXISTS $DB_MV_USER;" + psql -p "$DB_PORT" -h "$DB_URL" -d "$DB_NAME" -U "$DB_PG_USER" -tc "REVOKE ALL PRIVILEGES on schema public FROM $DB_MV_USER; DROP ROLE IF EXISTS $DB_MV_USER;" fi diff --git a/packages/data-lake-api/scripts/checkTestDataLakeExists.sh b/packages/data-lake-api/scripts/checkTestDataLakeExists.sh index 8430504640..9e35e149b9 100755 --- a/packages/data-lake-api/scripts/checkTestDataLakeExists.sh +++ b/packages/data-lake-api/scripts/checkTestDataLakeExists.sh @@ -1,15 +1,22 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. 
"$DIR/../../scripts/bash/ansiControlSequences.sh" # Set default port in case it wasn't in .env : "${DATA_LAKE_DB_PORT:=5432}" -if [ "$(PGPASSWORD=$DB_PG_PASSWORD psql -p $DATA_LAKE_DB_PORT -X -A -h $DATA_LAKE_DB_URL -U $DB_PG_USER -t -c "SELECT 1 FROM pg_database WHERE datname='$DATA_LAKE_DB_NAME'" )" = '1' ] -then +if [[ "$(PGPASSWORD=$DB_PG_PASSWORD psql -p $DATA_LAKE_DB_PORT -X -A -h $DATA_LAKE_DB_URL -U $DB_PG_USER -t -c "SELECT 1 FROM pg_database WHERE datname='$DATA_LAKE_DB_NAME'" )" = '1' ]]; then exit 0 fi -echo -e "Error: $DATA_LAKE_DB_NAME database does not exist!\n\nTo create it, please get the .env file from Bitwarden then run:\nyarn workspace @tupaia/data-lake-api setup-test-data-lake\n" +echo -e "${RED}Error: $DATA_LAKE_DB_NAME database does not exist!${RESET}" +echo 'To create it, make sure you have the environment variables from Bitwarden and run:' +echo +echo -e " ${BOLD}yarn workspace @tupaia/data-lake-api setup-test-data-lake${RESET}" +echo +echo -e "If you’re missing environment variables, see ${MAGENTA}https://beyond-essential.slab.com/posts/tupaia-monorepo-setup-v5egpdpq#hvfnz-set-environment-variables${RESET}." + exit 1 diff --git a/packages/data-lake-api/scripts/setupTestDataLake.sh b/packages/data-lake-api/scripts/setupTestDataLake.sh index f9563a4065..aa6f5c4ead 100755 --- a/packages/data-lake-api/scripts/setupTestDataLake.sh +++ b/packages/data-lake-api/scripts/setupTestDataLake.sh @@ -1,14 +1,15 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. 
"$DIR/../../scripts/bash/mergeEnvForDB.sh" # Set default port in case it wasn't in .env : "${DATA_LAKE_DB_PORT:=5432}" TUPAIA_USER_EXISTS=`PGPASSWORD=$DB_PG_PASSWORD psql -p $DATA_LAKE_DB_PORT -X -A -h $DATA_LAKE_DB_URL -U $DB_PG_USER -t -c "SELECT rolname FROM pg_catalog.pg_roles WHERE rolname = '$DATA_LAKE_DB_USER'"` -if [ -z "$TUPAIA_USER_EXISTS" ]; then +if [[ -z $TUPAIA_USER_EXISTS ]]; then PGPASSWORD=$DB_PG_PASSWORD psql -h $DATA_LAKE_DB_URL -p $DATA_LAKE_DB_PORT -U $DB_PG_USER -c "CREATE ROLE $DATA_LAKE_DB_USER LOGIN PASSWORD '$DATA_LAKE_DB_PASSWORD'" fi diff --git a/packages/database/scripts/checkTestDatabaseExists.sh b/packages/database/scripts/checkTestDatabaseExists.sh index e3bd945184..14f7c027b5 100755 --- a/packages/database/scripts/checkTestDatabaseExists.sh +++ b/packages/database/scripts/checkTestDatabaseExists.sh @@ -1,20 +1,22 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. "$DIR/../../scripts/bash/ansiControlSequences.sh" +. 
"$DIR/../../scripts/bash/mergeEnvForDB.sh" # Set default port in case it wasn't in .env : "${DB_PORT:=5432}" -if [ "$(PGPASSWORD=$DB_PG_PASSWORD psql -p $DB_PORT -X -A -h $DB_URL -U $DB_PG_USER -t -c "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" )" = '1' ] -then +if [ "$(PGPASSWORD=$DB_PG_PASSWORD psql -p $DB_PORT -X -A -h $DB_URL -U $DB_PG_USER -t -c "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" )" = '1' ]; then exit 0 fi -echo -e "\033[31mError: $DB_NAME database does not exist!\033[m" -echo "To create it, get the .env file from Bitwarden then run:" -echo "" -echo -e " \033[1myarn workspace @tupaia/database setup-test-database\033[m" -echo "" +echo -e "${RED}Error: $DB_NAME database does not exist!${RESET}" +echo 'To create it, make sure you have the environment variables from Bitwarden and run:' +echo +echo -e " ${BOLD}yarn workspace @tupaia/database setup-test-database${RESET}" +echo +echo -e "If you’re missing environment variables, see ${MAGENTA}https://beyond-essential.slab.com/posts/tupaia-monorepo-setup-v5egpdpq#hvfnz-set-environment-variables${RESET}." exit 1 diff --git a/packages/database/scripts/dumpDatabase.sh b/packages/database/scripts/dumpDatabase.sh index 9adf595aed..673506efbf 100755 --- a/packages/database/scripts/dumpDatabase.sh +++ b/packages/database/scripts/dumpDatabase.sh @@ -1,4 +1,5 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e function print_help() { cat </dev/null + while kill -0 "$pid" 2>/dev/null do i=$(( (i+1) %4 )) printf "\r$1 ${spin:$i:1}" sleep .5 done printf "\r$1 " - echo "" # reset prompt + echo # reset prompt } DIR=$(pwd "$0") -source "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. "$DIR/../../scripts/bash/mergeEnvForDB.sh" +. "$DIR/../../scripts/bash/ansiControlSequences.sh" -DUMP_FILE_NAME="dump.sql" +DUMP_FILE_NAME='dump.sql' -identity_file="" -server="dev" -target_dir="." +identity_file='' +server='dev' +target_dir='.' 
-while [ "$1" != "" ]; do +while [[ $1 != '' ]]; do case $1 in -s | --server) shift @@ -55,7 +57,7 @@ while [ "$1" != "" ]; do exit ;; *) - if [ "$identity_file" == "" ]; then + if [[ $identity_file = '' ]]; then identity_file=$1 shift else @@ -66,13 +68,13 @@ while [ "$1" != "" ]; do esac done -if [ "$identity_file" == "" ]; then +if [[ $identity_file = '' ]]; then print_help exit 1 fi -if [ "$DB_PG_USER" == "" ] || [ "$DB_PG_PASSWORD" == "" ]; then - echo "Missing postgres user credential env vars in @tupaia/database .env file. Check Bitwarden for variables and add them to the .env file" +if [[ $DB_PG_USER = '' || $DB_PG_PASSWORD = '' ]]; then + echo -e "${RED}Missing Postgres user credential env vars in @tupaia/database .env file.${RESET} Check Bitwarden for variables and add them to the .env file" exit 1 fi @@ -86,6 +88,5 @@ target_zip_path="$target_path.gz" show_loading_spinner "Dumping database to $target_zip_path" "PGPASSWORD=$DB_PG_PASSWORD pg_dump \"host=$host user=$DB_PG_USER dbname=tupaia sslmode=require sslkey=$identity_file\" -Z1 -f $target_zip_path" show_loading_spinner "Unzipping $target_zip_path" "gunzip -f $target_zip_path" -echo "Dump file available at $target_path" - -echo "Done!" 
+echo "Dump file available at $target_path" +echo -e "${GREEN}Done!${RESET}" diff --git a/packages/database/scripts/migrateCreate.sh b/packages/database/scripts/migrateCreate.sh index a50752295f..390e47e89b 100755 --- a/packages/database/scripts/migrateCreate.sh +++ b/packages/database/scripts/migrateCreate.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash read -p "Enter migration name: " MIGRATION_NAME read -p "Enter the scope of this migration ('schema' or 'data'): " MIGRATION_SCOPE diff --git a/packages/devops/scripts/ci/triggerRedeploy.sh b/packages/devops/scripts/ci/triggerRedeploy.sh index be7fb3b922..a92e893ae7 100755 --- a/packages/devops/scripts/ci/triggerRedeploy.sh +++ b/packages/devops/scripts/ci/triggerRedeploy.sh @@ -1,10 +1,11 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e STOPPED_INSTANCES=$(aws ec2 describe-instances \ --filters Name=tag:Branch,Values=${CI_BRANCH} Name=tag:DeploymentType,Values=tupaia Name=instance-state-name,Values=stopped \ --no-cli-pager) -if [[ $STOPPED_INSTANCES == *"Instances"* ]]; then +if [[ $STOPPED_INSTANCES = *"Instances"* ]]; then echo "Can't redeploy while a deployment for ${CI_BRANCH} is stopped. Try again inside office hours, or start the app server and database then restart the build." 
exit 1 fi @@ -15,7 +16,7 @@ RUNNING_INSTANCES=$(aws ec2 describe-instances \ --no-cli-pager) if [[ $RUNNING_INSTANCES != *"Instances"* ]]; then - echo "No deployment running, skipping redeploy" + echo 'No deployment running, skipping redeploy' exit 0 fi @@ -30,35 +31,35 @@ AWS_MAX_ATTEMPTS=1 aws lambda invoke \ $RESPONSE_FILE if grep -q errorMessage "$RESPONSE_FILE"; then - echo "Error while trying to redeploy" + echo 'Error while trying to redeploy' cat $RESPONSE_FILE exit 1 fi -DEPLOYMENTS=$(cat $RESPONSE_FILE | jq -r '.[] | @base64') +DEPLOYMENTS=$(cat "$RESPONSE_FILE" | jq -r '.[] | @base64') for DEPLOYMENT_BASE64 in $DEPLOYMENTS; do - DEPLOYMENT=$(echo $DEPLOYMENT_BASE64 | base64 --decode) - DEPLOYMENT_NAME=$(echo $DEPLOYMENT | jq -r '.DeploymentName') - NEW_INSTANCE_ID=$(echo $DEPLOYMENT | jq -r '.NewInstanceId') + DEPLOYMENT=$(echo "$DEPLOYMENT_BASE64" | base64 --decode) + DEPLOYMENT_NAME=$(echo "$DEPLOYMENT" | jq -r '.DeploymentName') + NEW_INSTANCE_ID=$(echo "$DEPLOYMENT" | jq -r '.NewInstanceId') - echo "Waiting for ${DEPLOYMENT_NAME} to run its startup build script. To watch detailed progress, connect to instance ${NEW_INSTANCE_ID} and run tail -f logs/deployment_log.txt" + echo "Waiting for $DEPLOYMENT_NAME to run its startup build script. 
To watch detailed progress, connect to instance $NEW_INSTANCE_ID and run tail -f logs/deployment_log.txt" WAIT_ATTEMPTS=0 while true; do STARTUP_COMPLETE=false aws ec2 wait instance-exists \ - --instance-ids ${NEW_INSTANCE_ID} \ + --instance-ids "$NEW_INSTANCE_ID" \ --filters Name=tag:StartupBuildProgress,Values=complete,errored \ --no-cli-pager && STARTUP_COMPLETE=true - if [ $STARTUP_COMPLETE == true ]; then + if [[ $STARTUP_COMPLETE = true ]]; then INSTANCE_ERRORED_RESPONSE=$(aws ec2 describe-instances \ - --instance-ids ${NEW_INSTANCE_ID} \ + --instance-ids "$NEW_INSTANCE_ID" \ --filters Name=tag:StartupBuildProgress,Values=errored \ --no-cli-pager) - if [[ $INSTANCE_ERRORED_RESPONSE == *"Instances"* ]]; then - echo "Build failed! Connect to instance ${NEW_INSTANCE_ID} and check the logs at ~/logs/deployment_log.txt and /var/log/cloud-init-output.log" + if [[ $INSTANCE_ERRORED_RESPONSE = *"Instances"* ]]; then + echo "Build failed! Connect to instance $NEW_INSTANCE_ID and check the logs at ~/logs/deployment_log.txt and /var/log/cloud-init-output.log" exit 1 fi - echo "New instance ${NEW_INSTANCE_ID} is ready, swapping over ELB" + echo "New instance $NEW_INSTANCE_ID is ready, swapping over ELB" SWAP_OUT_RESPONSE_FILE=lambda_swap_out_response.json AWS_MAX_ATTEMPTS=1 aws lambda invoke \ --function-name deployment \ @@ -66,24 +67,24 @@ for DEPLOYMENT_BASE64 in $DEPLOYMENTS; do --no-cli-pager \ --cli-binary-format raw-in-base64-out \ --cli-read-timeout 900 \ - $SWAP_OUT_RESPONSE_FILE + "$SWAP_OUT_RESPONSE_FILE" if grep -q errorMessage "$SWAP_OUT_RESPONSE_FILE"; then - echo "Error while trying to swap out instances" - cat $SWAP_OUT_RESPONSE_FILE + echo 'Error while trying to swap out instances' + cat "$SWAP_OUT_RESPONSE_FILE" exit 1 fi - echo "ELB for ${DEPLOYMENT_NAME} now points to ${NEW_INSTANCE_ID}" + echo "ELB for $DEPLOYMENT_NAME now points to $NEW_INSTANCE_ID" break else - if [ "$WAIT_ATTEMPTS" -ge 75 ]; then # 75 * 200 seconds = 4.16 hours, sitting within 
codeship's 5 hour timeout - echo "Build failed! Waited 75 times, but new instance is still not reachable" + if (( WAIT_ATTEMPTS >= 75 )); then # 75 * 200 seconds = 4.16 hours, sitting within codeship's 5 hour timeout + echo 'Build failed! Waited 75 times, but new instance is still not reachable' exit 1 else - echo "Still waiting for ${DEPLOYMENT_NAME} startup build to complete. To watch detailed progress, connect to instance ${NEW_INSTANCE_ID} and run tail -f logs/deployment_log.txt" - WAIT_ATTEMPTS=$((WAIT_ATTEMPTS+1)) + echo "Still waiting for $DEPLOYMENT_NAME startup build to complete. To watch detailed progress, connect to instance $NEW_INSTANCE_ID and run tail -f logs/deployment_log.txt" + (( WAIT_ATTEMPTS++ )) fi fi done done -echo "Redeploy complete" +echo 'Redeploy complete' diff --git a/packages/devops/scripts/ci/utils.sh b/packages/devops/scripts/ci/utils.sh index f0aa34140c..135ea69e5e 100755 --- a/packages/devops/scripts/ci/utils.sh +++ b/packages/devops/scripts/ci/utils.sh @@ -8,19 +8,19 @@ function ansi_color() { } function log_with_color() { - echo -e "$(ansi_color $2)$1$(ansi_color $COLOR_RESET)" + echo -e "$(ansi_color "$2")$1$(ansi_color "$COLOR_RESET")" } function log_error() { - log_with_color "$1" $COLOR_RED + log_with_color "$1" "$COLOR_RED" } function log_warn() { - log_with_color "$1" $COLOR_YELLOW + log_with_color "$1" "$COLOR_YELLOW" } function log_success() { - log_with_color "$1" $COLOR_GREEN + log_with_color "$1" "$COLOR_GREEN" } function get_max_length() { @@ -29,20 +29,20 @@ function get_max_length() { for item in "${array[@]}"; do length=${#item} - if [[ $length -gt $max ]]; then + if (( length > max )); then max=$length fi done - echo $max + echo "$max" } function get_branch_name() { local branch_name="$CI_BRANCH" - if [[ $branch_name == "" ]]; then + if [[ $branch_name = '' ]]; then # Get currently checked out branch - branch_name=$(git rev-parse --abbrev-ref HEAD) + branch_name=$(git branch --show-current) fi - echo $branch_name 
-} \ No newline at end of file + echo "$branch_name" +} diff --git a/packages/devops/scripts/ci/validateBranchName.sh b/packages/devops/scripts/ci/validateBranchName.sh index d0273e59af..4b5c680d82 100755 --- a/packages/devops/scripts/ci/validateBranchName.sh +++ b/packages/devops/scripts/ci/validateBranchName.sh @@ -1,7 +1,8 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(dirname "$0") -. ${DIR}/utils.sh +. "$DIR/utils.sh" INVALID_CHARS=('/' '\' '.' '&' '?' '_') SUBDOMAIN_SUFFIXES=(admin admin-api aggregation api config db export lesmis lesmis-api mobile psss psss-api report report-api ssh entity entity-api meditrak-api data-table-api tonga-aggregation www tupaia-web-api datatrak-web-api datatrak) @@ -10,19 +11,19 @@ SUBDOMAIN_SUFFIXES=(admin admin-api aggregation api config db export lesmis lesm # to create deployment urls, eg {{branchName}}-tonga-aggregation.tupaia.org MAX_SUBDOMAIN_LENGTH=64 MAX_SUBDOMAIN_SUFFIX_LENGTH=$(get_max_length "${SUBDOMAIN_SUFFIXES[@]}") -MAX_BRANCH_NAME_LENGTH=$((MAX_SUBDOMAIN_LENGTH - ${MAX_SUBDOMAIN_SUFFIX_LENGTH} - 1)) # Subtract 1 for the connecting `-` +MAX_BRANCH_NAME_LENGTH=$((MAX_SUBDOMAIN_LENGTH - MAX_SUBDOMAIN_SUFFIX_LENGTH - 1)) # Subtract 1 for the connecting `-` # As of 11/08/21, MAX_BRANCH_NAME_LENGTH = 64 - 17 - 1 = 46 (Longest subdomain "tonga-aggregation") function validate_name_ending() { local branch_name=$1 - for suffix in ${SUBDOMAIN_SUFFIXES[@]}; do - if [[ "$branch_name" == *$suffix ]]; then + for suffix in "${SUBDOMAIN_SUFFIXES[@]}"; do + if [[ $branch_name = *$suffix ]]; then log_error "❌ Invalid branch name ending: '$suffix'" exit 1 fi # api is one of our suffixes so makes sure [branch]-api doesn't match any other api suffixes - if [[ "$suffix" == *-api && $branch_name-api == *$suffix ]]; then + if [[ $suffix = *-api && $branch_name-api = *$suffix ]]; then log_error "❌ Invalid branch name ending: '$suffix'" exit 1 fi @@ -47,8 +48,8 @@ function validate_name_chars() { exit 1 fi - for character in 
${INVALID_CHARS[@]}; do - if [[ "$branch_name" == *"$character"* ]]; then + for character in "${INVALID_CHARS[@]}"; do + if [[ $branch_name = *"$character"* ]]; then log_error "❌ Invalid character in branch name: '$character'" exit 1 fi @@ -56,9 +57,9 @@ function validate_name_chars() { } branch_name=$(get_branch_name) -validate_name_ending $branch_name -validate_name_length $branch_name -validate_name_chars $branch_name +validate_name_ending "$branch_name" +validate_name_length "$branch_name" +validate_name_chars "$branch_name" log_success "✔ Branch name is valid!" exit 0 diff --git a/packages/devops/scripts/ci/validateNewMigrations.sh b/packages/devops/scripts/ci/validateNewMigrations.sh index cd63364522..8c81b96792 100755 --- a/packages/devops/scripts/ci/validateNewMigrations.sh +++ b/packages/devops/scripts/ci/validateNewMigrations.sh @@ -1,15 +1,16 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(dirname "$0") -ROOT="${DIR}/../../../../" +ROOT="$DIR/../../../../" -. ${DIR}/utils.sh +. "$DIR/utils.sh" function get_date_command() { - if [[ $(uname) == "Darwin" ]]; then - echo "gdate" # install gdate on MacOs: brew install coreutils + if [[ $(uname) = Darwin ]]; then + echo 'gdate' # install gdate on MacOs: brew install coreutils else - echo "date" + echo 'date' fi } @@ -18,7 +19,7 @@ date_command=$(get_date_command) function convert_timestamp_to_date() { local timestamp=$1 local date=$($date_command -d @$timestamp '+%Y-%m-%d') - echo $date + echo "$date" } @@ -34,7 +35,7 @@ function check_migration_outdated() { day=${migration_name:39:2} migration_timestamp=$($date_command -d "${year}-${month}-${day}" +%s) - if (( $migration_timestamp < $included_migrations_timestamp )); then + if (( migration_timestamp < included_migrations_timestamp )); then log_error "❌ New migration should be created after $valid_migration_date. 
Invalid migration name: '$migration_name'" fi } @@ -47,24 +48,24 @@ function validate_migrations(){ local errors=""; while read -r migration_name; do - if [[ "$migration_name" == "" ]]; then + if [[ $migration_name = '' ]]; then break fi errors="$errors$(check_migration_outdated "$migration_name")" done <<< "$new_migration_names_in_string" - if [[ "$errors" != "" ]]; then - echo $errors; + if [[ $errors != '' ]]; then + echo "$errors"; exit 1; fi } current_branch_name=$(get_branch_name) -origin_branch_name="master" +origin_branch_name='master' # Skip validation if current branch name is master -if [[ "$current_branch_name" == "$origin_branch_name" ]]; then - echo "Skipping validation step while current branch is the same as origin" +if [[ $current_branch_name = "$origin_branch_name" ]]; then + echo 'Skipping validation step while current branch is the same as origin' exit 0 fi @@ -73,11 +74,11 @@ fi git remote remove origin git remote add origin https://github.com/beyondessential/tupaia.git # Remove this sub module because it uses ssh -git rm $ROOT/packages/data-api/scripts/pg-mv-fast-refresh +git rm "$ROOT/packages/data-api/scripts/pg-mv-fast-refresh" git fetch --quiet -git fetch origin $origin_branch_name:$origin_branch_name --quiet -validate_migrations $current_branch_name $origin_branch_name +git fetch origin "$origin_branch_name:$origin_branch_name" --quiet +validate_migrations "$current_branch_name" "$origin_branch_name" log_success "✔ New migrations are valid!" 
-exit 0 \ No newline at end of file +exit 0 diff --git a/packages/devops/scripts/ci/validateTests.sh b/packages/devops/scripts/ci/validateTests.sh index beb5358aa2..566db82635 100755 --- a/packages/devops/scripts/ci/validateTests.sh +++ b/packages/devops/scripts/ci/validateTests.sh @@ -1,6 +1,7 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(dirname "$0") -ROOT="${DIR}/../../../../" +ROOT="$DIR/../../../../" -node ${ROOT}/scripts/node/validateTests +node "$ROOT/scripts/node/validateTests" diff --git a/packages/devops/scripts/ci/validateTypesAndDbSchemaInSync.sh b/packages/devops/scripts/ci/validateTypesAndDbSchemaInSync.sh index baf9446d62..3658746931 100755 --- a/packages/devops/scripts/ci/validateTypesAndDbSchemaInSync.sh +++ b/packages/devops/scripts/ci/validateTypesAndDbSchemaInSync.sh @@ -1,10 +1,11 @@ -#!/bin/bash -ex +#!/usr/bin/env bash +set -ex SCRIPT_DIR=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) # Load environment variables from .env files -source $SCRIPT_DIR/../../../../scripts/bash/mergeEnvForDB.sh +source "$SCRIPT_DIR/../../../../scripts/bash/mergeEnvForDB.sh" cd "$SCRIPT_DIR" diff --git a/packages/devops/scripts/deployment-aws/buildDeployablePackages.sh b/packages/devops/scripts/deployment-aws/buildDeployablePackages.sh index 3fdd54f77e..346fcf65b3 100755 --- a/packages/devops/scripts/deployment-aws/buildDeployablePackages.sh +++ b/packages/devops/scripts/deployment-aws/buildDeployablePackages.sh @@ -4,7 +4,7 @@ DIR=$(dirname "$0") TUPAIA_DIR=$DIR/../../../.. DEPLOYMENT_NAME=$1 -echo "Building deployable packages" +echo 'Building deployable packages' PACKAGES=$(${TUPAIA_DIR}/scripts/bash/getDeployablePackages.sh) # Initialise NVM (which sets the path for access to npm, yarn etc. 
as well) diff --git a/packages/devops/scripts/deployment-aws/checkoutLatest.sh b/packages/devops/scripts/deployment-aws/checkoutLatest.sh index 67c03c8ddb..579a651516 100755 --- a/packages/devops/scripts/deployment-aws/checkoutLatest.sh +++ b/packages/devops/scripts/deployment-aws/checkoutLatest.sh @@ -1,4 +1,5 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e # Get latest code and dependencies DIR=$(dirname "$0") @@ -20,4 +21,4 @@ git reset --hard # clear out any manual changes that have been made, which would git checkout ${BRANCH_TO_USE} git reset --hard origin/${BRANCH_TO_USE} -echo "Checked out latest code" +echo 'Checked out latest code' diff --git a/packages/devops/scripts/deployment-aws/configureNginx.sh b/packages/devops/scripts/deployment-aws/configureNginx.sh index 41d6d8a843..00ef86c239 100755 --- a/packages/devops/scripts/deployment-aws/configureNginx.sh +++ b/packages/devops/scripts/deployment-aws/configureNginx.sh @@ -1,4 +1,5 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(dirname "$0") TUPAIA_DIR=$DIR/../../../.. 
diff --git a/packages/devops/scripts/deployment-aws/startCloudwatchAgent.sh b/packages/devops/scripts/deployment-aws/startCloudwatchAgent.sh index c19d2692e9..d200d8bb83 100755 --- a/packages/devops/scripts/deployment-aws/startCloudwatchAgent.sh +++ b/packages/devops/scripts/deployment-aws/startCloudwatchAgent.sh @@ -1,4 +1,4 @@ #!/bin/bash -le DIR=$(dirname "$0") -echo "Turning on cloudwatch agent" +echo 'Turning on cloudwatch agent' /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl -a fetch-config -m ec2 -c file:$DIR/cloudwatchConfig.json -s diff --git a/packages/devops/scripts/deployment-common/startBackEnds.sh b/packages/devops/scripts/deployment-common/startBackEnds.sh index 988e02a433..857d345c67 100755 --- a/packages/devops/scripts/deployment-common/startBackEnds.sh +++ b/packages/devops/scripts/deployment-common/startBackEnds.sh @@ -9,13 +9,13 @@ PACKAGES=$(${TUPAIA_DIR}/scripts/bash/getDeployablePackages.sh) # Start back end server packages for PACKAGE in ${PACKAGES[@]}; do - if [[ $PACKAGE == *server ]]; then - if [[ $PACKAGE == 'central-server' ]]; then + if [[ $PACKAGE = *server ]]; then + if [[ $PACKAGE = central-server ]]; then # reset cwd back to `/tupaia` cd ${TUPAIA_DIR} # ensure that the analytics table is fully built - echo "Building analytics table" + echo 'Building analytics table' yarn workspace @tupaia/data-api install-mv-refresh yarn workspace @tupaia/data-api patch-mv-refresh up yarn workspace @tupaia/data-api build-analytics-table @@ -28,7 +28,7 @@ for PACKAGE in ${PACKAGES[@]}; do echo "Starting ${PACKAGE}" cd ${TUPAIA_DIR}/packages/$PACKAGE REPLICATION_PM2_CONFIG='' - if [ $PACKAGE == "web-config-server" ] || [ $PACKAGE == "report-server" ] ; then + if [[ $PACKAGE = web-config-server || $PACKAGE = report-server ]] ; then # as many replicas as cpu cores - 1 REPLICATION_PM2_CONFIG='-i -1' fi @@ -44,4 +44,4 @@ pm2 save # Log dump file grep status /home/ubuntu/.pm2/dump.pm2 -echo "Finished deploying latest" +echo 'Finished 
deploying latest' diff --git a/packages/devops/scripts/lambda/resources/startupTupaia.sh b/packages/devops/scripts/lambda/resources/startupTupaia.sh index c0c3ca3c74..67f9b91747 100755 --- a/packages/devops/scripts/lambda/resources/startupTupaia.sh +++ b/packages/devops/scripts/lambda/resources/startupTupaia.sh @@ -25,7 +25,7 @@ BRANCH=$(${DEPLOYMENT_SCRIPTS}/../utility/getEC2TagValue.sh Branch) echo "Starting up ${DEPLOYMENT_NAME} (${BRANCH})" # Set bash prompt to have deployment name in it -if [[ $DEPLOYMENT_NAME == "production" ]]; then +if [[ $DEPLOYMENT_NAME = production ]]; then BASH_PROMPT_NAME="PROD" BASH_PROMPT_COLOR="31" else @@ -40,15 +40,15 @@ mkdir -m 777 -p $LOGS_DIR # Turn on cloudwatch agent for prod and dev (can be turned on manually if needed on feature instances) # TODO currently broken -# if [[ $DEPLOYMENT_NAME == "production" || $DEPLOYMENT_NAME == "dev" ]]; then +# if [[ $DEPLOYMENT_NAME = production || $DEPLOYMENT_NAME = dev ]]; then # $DEPLOYMENT_SCRIPTS/startCloudwatchAgent.sh |& while IFS= read -r line; do printf '\%s \%s\n' "$(date)" "$line"; done >> $LOGS_DIR/deployment_log.txt # fi # Add preaggregation cron job if production -if [[ $DEPLOYMENT_NAME == "production" ]]; then +if [[ $DEPLOYMENT_NAME = production ]]; then \. 
"$HOME_DIR/.nvm/nvm.sh" # Load nvm so node is available on $PATH sudo -u ubuntu echo "10 13 * * * PATH=$PATH $HOME_DIR/tupaia/packages/web-config-server/run_preaggregation.sh | while IFS= read -r line; do printf '\%s \%s\\n' \"\$(date)\" \"\$line\"; done > $LOGS_DIR/preaggregation.txt" > tmp.cron - sudo -u ubuntu crontab -l >> tmp.cron || echo "" >> tmp.cron + sudo -u ubuntu crontab -l >> tmp.cron || echo >> tmp.cron sudo -u ubuntu crontab tmp.cron rm tmp.cron fi @@ -56,7 +56,7 @@ fi # Fetch the latest code cd $TUPAIA_DIR BRANCH_ON_REMOTE=$(sudo -Hu ubuntu git ls-remote --heads origin ${BRANCH}) -if [[ $BRANCH_ON_REMOTE == *${BRANCH} ]]; then +if [[ $BRANCH_ON_REMOTE = *${BRANCH} ]]; then echo "${BRANCH} exists" BRANCH_TO_USE=${BRANCH} else diff --git a/packages/devops/scripts/utility/getEC2TagValue.sh b/packages/devops/scripts/utility/getEC2TagValue.sh index e5db368407..bb3ebade2b 100755 --- a/packages/devops/scripts/utility/getEC2TagValue.sh +++ b/packages/devops/scripts/utility/getEC2TagValue.sh @@ -1,4 +1,5 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e # Get the stage tag of this ec2 instance from AWS TAG_NAME=$1 diff --git a/scripts/bash/ansiControlSequences.sh b/scripts/bash/ansiControlSequences.sh new file mode 100644 index 0000000000..3a6cda434f --- /dev/null +++ b/scripts/bash/ansiControlSequences.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash +# Defines (and exports) a handful of the more widely supported ANSI control sequences for +# manipulating font and colour when writing to stdout. Honours the NO_COLOR environment variable if +# it is set. +# +# EXAMPLE USAGE +# source "path/to/ansiControlSequence.sh" +# echo -e "${BOLD}${WHITE}${ON_RED}ERROR${RESET} File does not exist." +# printf 'See %bhttp://bes.au%b for more info.\n' "$MAGENTA" "$RESET" +# +# REMARKS +# - If using echo, remember to use -e. Otherwise, the ANSI codes will be printed literally. 
+# - Avoid using variables in the printf format string: https://github.com/koalaman/shellcheck/wiki/SC2059 +# +# REFERENCE +# https://en.wikipedia.org/wiki/ANSI_escape_code + +# ANSI control sequences + +export CURSOR_UP='\033[A' +export CURSOR_DOWN='\033[B' +export CURSOR_FORWARD='\033[C' +export CURSOR_BACK='\033[D' +export CURSOR_NEXT_LINE='\033[E' +export CURSOR_PREV_LINE='\033[F' +export CURSOR_START_OF_LINE='\033[G' +export CLEAR_LINE="\033[2K${CURSOR_START_OF_LINE}" + +# Select graphic rendition (SGR) parameters + +if [[ $NO_COLOR != '' ]]; then + # See https://no-color.org + export RESET='' + export BOLD='' + export UNDERLINE='' + export BLACK='' + export RED='' + export GREEN='' + export YELLOW='' + export BLUE='' + export MAGENTA='' + export CYAN='' + export WHITE='' + export ON_BLACK='' + export ON_RED='' + export ON_GREEN='' + export ON_YELLOW='' + export ON_BLUE='' + export ON_MAGENTA='' + export ON_CYAN='' + export ON_WHITE='' +else + export RESET='\033[m' + export BOLD='\033[1m' + export UNDERLINE='\033[4m' + export BLACK='\033[30m' + export RED='\033[31m' + export GREEN='\033[32m' + export YELLOW='\033[33m' + export BLUE='\033[34m' + export MAGENTA='\033[35m' + export CYAN='\033[36m' + export WHITE='\033[37m' + export ON_BLACK='\033[40m' + export ON_RED='\033[41m' + export ON_GREEN='\033[42m' + export ON_YELLOW='\033[43m' + export ON_BLUE='\033[44m' + export ON_MAGENTA='\033[45m' + export ON_CYAN='\033[46m' + export ON_WHITE='\033[47m' +fi diff --git a/scripts/bash/backendStartDev.sh b/scripts/bash/backendStartDev.sh index 214e413b07..68c0e9fe9c 100755 --- a/scripts/bash/backendStartDev.sh +++ b/scripts/bash/backendStartDev.sh @@ -1,16 +1,16 @@ -#!/bin/bash -e - -## -# usage: +#!/usr/bin/env bash +# Usage: # $1 - port to run babel inspector on # Optionally provide '-i' or '--include-internal' to include build and watching internal dependencies # Optionally provide '-ts' or '--typescript' to start typescript server -## -USAGE="Usage: backendStartDev 
babel_port_inspector [-i --include-internal] [-ts --typescript]" +set -e + DIR=$(dirname "$0") -CONCURRENTLY_BIN="${DIR}/../../node_modules/.bin/concurrently" -watch_flags="" +. "$DIR/ansiControlSequences.sh" + +USAGE="Usage: ${BOLD}backendStartDev babel_port_inspector${RESET} [${BOLD}-i${RESET}|${BOLD}--include-internal${RESET}] [${BOLD}-ts${RESET}|${BOLD}--typescript${RESET}]" +watch_flags='' include_internal=false type_script=false inspect_port=${1} @@ -18,7 +18,7 @@ inspect_port=${1} # Start server command for JS start_server="nodemon -w src --exec \"babel-node src --inspect=${inspect_port} --config-file '../../babel.config.json'\"" -while [ "$2" != "" ]; do +while [[ $2 != '' ]]; do case $2 in -ts | --typescript) type_script=true @@ -29,35 +29,34 @@ while [ "$2" != "" ]; do shift ;; -s | --skip-internal) - echo "Skipping internal dependencies is now done by default. Remove the -s | --skip-internal flag, and if you want to include internal dependencies, add a -i (do try it - it's a lot faster than it used to be, because it only builds those relevant to the current package!)" + echo -e "Skipping internal dependencies is now done by default. Remove the ${BOLD}--skip-internal${RESET} (${BOLD}-s${RESET}) flag, and if you want to include internal dependencies, add a ${BOLD}-i${RESET}. (Do try it - it’s a lot faster than it used to be, because it only builds those relevant to the current package$EXCLAMATION_MARK)" exit 1 ;; *) - echo $USAGE + echo -e "$USAGE" exit 1 ;; esac done # Start server command for TS -if [[ ${type_script} == true ]]; then +if [[ $type_script = true ]]; then start_server="nodemon --watch src -e ts,json,js --exec node --inspect=${inspect_port} -r ts-node/register src/index.ts" fi -echo "Starting server" +echo -e "${BOLD}Starting server...${RESET}" # If internal dependencies are included, add them to the watch list. This will watch for changes to the dist folder of each internal dependency. 
If the internal dependency then gets rebuilt, the server will restart. -if [[ ${include_internal} == true ]]; then - echo "Internal dependencies are under watch for hot reload" - for PACKAGE in $(${DIR}/getInternalDependencies.sh); do - watch_flags="${watch_flags} --watch ../${PACKAGE}/dist" +if [[ $include_internal = true ]]; then + echo 'Internal dependencies are under watch for hot reload' + for PACKAGE in $("$DIR/getInternalDependencies.sh"); do + watch_flags+=" --watch ../$PACKAGE/dist" done # add the watch flags to the server start process, as well as a 1 second delay to debounce the # many restarts that otherwise happen during the initial build of internal dependencies - start_server="${start_server} --delay 1 ${watch_flags}" - + start_server+=" --delay 1 $watch_flags" else - echo "Starting server without internal dependency build and watch. To include internal dependencies, add the -i flag - it's much faster than it used to be!" - + echo -e "Starting server without internal dependency build and watch. To include internal dependencies, add the ${BOLD}-i${RESET} flag - it’s much faster than it used to be!" fi -eval ${start_server} + +eval "$start_server" diff --git a/scripts/bash/buildInternalDependencies.sh b/scripts/bash/buildInternalDependencies.sh index 01f2aadab1..9fd3591739 100755 --- a/scripts/bash/buildInternalDependencies.sh +++ b/scripts/bash/buildInternalDependencies.sh @@ -1,15 +1,17 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(dirname "$0") +. 
"$DIR/ansiControlSequences.sh" CONCURRENT_BUILD_BATCH_SIZE=1 CONCURRENTLY_BIN="${DIR}/../../node_modules/.bin/concurrently" -USAGE="Usage: \033[1mbuildInternalDependencies.sh\033[m [\033[1m--watch\033[m] [\033[1m--packagePath\033[m|\033[1m-p\033[m]" +USAGE="Usage: ${BOLD}buildInternalDependencies.sh${RESET} [${BOLD}--watch${RESET}] [${BOLD}--packagePath${RESET}|${BOLD}-p${RESET}]" watch=false package_path="" -while [ "$1" != "" ]; do +while [[ $1 != '' ]]; do case $1 in --watch) shift @@ -31,26 +33,26 @@ while [ "$1" != "" ]; do esac done -[[ $watch = "true" ]] && build_args="--watch" || build_args="" -[[ $watch = "true" ]] && build_ts_args="--watch --preserveWatchOutput" || build_ts_args="" +[[ $watch = true ]] && build_args='--watch' || build_args='' +[[ $watch = true ]] && build_ts_args='--watch --preserveWatchOutput' || build_ts_args='' build_commands=() build_prefixes=() # Build dependencies -for PACKAGE in $(${DIR}/getInternalDependencies.sh ${package_path}); do - build_commands+=("\"NODE_ENV=production yarn workspace @tupaia/${PACKAGE} build-dev $build_args\"") - build_prefixes+=("${PACKAGE},") +for PACKAGE in $("$DIR/getInternalDependencies.sh" "$package_path"); do + build_commands+=("\"NODE_ENV=production yarn workspace @tupaia/$PACKAGE build-dev $build_args\"") + build_prefixes+=("$PACKAGE,") done -if [[ $watch == "true" ]]; then - echo -e "\033[1mConcurrently building and watching all internal dependencies\033[m" +if [[ $watch = true ]]; then + echo -e "${BOLD}Concurrently building and watching all internal dependencies${RESET}" echo "> ${CONCURRENTLY_BIN} --names \"${build_prefixes[*]}\" ${build_commands[@]}" - echo "" + echo eval "${CONCURRENTLY_BIN} --names \"${build_prefixes[*]}\" ${build_commands[@]}" else - echo -e "\033[1mConcurrently building internal dependencies in batches of ${CONCURRENT_BUILD_BATCH_SIZE}\033[m" - echo "> ${CONCURRENTLY_BIN} -m $CONCURRENT_BUILD_BATCH_SIZE --names \"${build_prefixes[*]}\" -k ${build_commands[*]}" - echo "" - 
eval "${CONCURRENTLY_BIN} -m $CONCURRENT_BUILD_BATCH_SIZE --names \"${build_prefixes[*]}\" -k ${build_commands[*]}" + echo -e "${BOLD}Concurrently building internal dependencies in batches of ${CONCURRENT_BUILD_BATCH_SIZE}${RESET}" + echo "> $CONCURRENTLY_BIN -m $CONCURRENT_BUILD_BATCH_SIZE --names \"${build_prefixes[*]}\" -k ${build_commands[*]}" + echo + eval "$CONCURRENTLY_BIN -m $CONCURRENT_BUILD_BATCH_SIZE --names \"${build_prefixes[*]}\" -k ${build_commands[*]}" fi diff --git a/scripts/bash/buildNonInternalDependencies.sh b/scripts/bash/buildNonInternalDependencies.sh index 1d2679f2b7..1c9a07779b 100755 --- a/scripts/bash/buildNonInternalDependencies.sh +++ b/scripts/bash/buildNonInternalDependencies.sh @@ -1,6 +1,23 @@ -#!/bin/bash -ex +#!/usr/bin/env bash +set -ex -PACKAGES="report-server admin-panel-server central-server data-table-server datatrak-web datatrak-web-server entity-server lesmis lesmis-server meditrak-app-server psss psss-server web-config-server tupaia-web tupaia-web-server" +PACKAGES=( + 'report-server' + 'admin-panel-server' + 'central-server' + 'data-table-server' + 'datatrak-web' + 'datatrak-web-server' + 'entity-server' + 'lesmis' + 'lesmis-server' + 'meditrak-app-server' + 'psss' + 'psss-server' + 'web-config-server' + 'tupaia-web' + 'tupaia-web-server' +) CONCURRENT_BUILD_BATCH_SIZE=1 @@ -8,9 +25,9 @@ build_commands=() build_prefixes=() # Build dependencies -for PACKAGE in $PACKAGES; do - build_commands+=("\"yarn workspace @tupaia/${PACKAGE} build\"") - build_prefixes+=("${PACKAGE},") +for PACKAGE in "${PACKAGES[@]}"; do + build_commands+=("\"yarn workspace @tupaia/$PACKAGE build\"") + build_prefixes+=("$PACKAGE,") done eval "yarn concurrently -m $CONCURRENT_BUILD_BATCH_SIZE --names \"${build_prefixes[*]}\" -k ${build_commands[*]}" diff --git a/scripts/bash/downloadEnvironmentVariables.sh b/scripts/bash/downloadEnvironmentVariables.sh index ad382b1693..953f83622e 100755 --- a/scripts/bash/downloadEnvironmentVariables.sh +++ 
b/scripts/bash/downloadEnvironmentVariables.sh @@ -1,97 +1,96 @@ -#!/bin/bash -e -set +x # do not output commands in this script, as some would show credentials in plain text +#!/usr/bin/env bash +set -e +x # Do not output commands in this script, as some would show credentials in plain text DEPLOYMENT_NAME=$1 DIR=$(dirname "$0") -COLLECTION_PATH="Engineering/Tupaia General/Environment Variables" # Collection in BitWarden where .env vars are kept +. "$DIR/ansiControlSequences.sh" +# Collection in BitWarden where .env vars are kept +COLLECTION_PATH='Engineering/Tupaia General/Environment Variables' -# can provide one or more packages as command line arguments, or will default to all -if [ -z $2 ]; then - echo "Fetching all .env files" - PACKAGES=$(${DIR}/getPackagesWithEnvFiles.sh) +# Log in to bitwarden +echo -e "${BLUE}==>️${RESET} ${BOLD}Logging into Bitwarden${RESET}" +bw login --check || bw login "$BITWARDEN_EMAIL" "$BITWARDEN_PASSWORD" +eval "$(bw unlock "$BITWARDEN_PASSWORD" | grep -o -m 1 'export BW_SESSION=.*$')" + +COLLECTION_ID=$(bw get collection "$COLLECTION_PATH" | jq .id) + +echo + +# Can provide one or more packages as command line arguments, or will default to all +if [[ -z $2 ]]; then + PACKAGES=($("$DIR/getPackagesWithEnvFiles.sh")) + echo -e "${BLUE}==>️${RESET} ${BOLD}Fetching environment variables for all packages${RESET}" else - PACKAGES=${@:2} - echo "Fetching environment variables for ${PACKAGES}" + PACKAGES=("${@:2}") + echo -e "${BLUE}==>️${RESET} ${BOLD}Fetching environment variables for ${PACKAGES[*]}${RESET}" fi -# Login to bitwarden -bw login --check || bw login $BITWARDEN_EMAIL $BITWARDEN_PASSWORD -eval "$(bw unlock $BITWARDEN_PASSWORD | grep -o -m 1 'export BW_SESSION=.*$')" -COLLECTION_ID=$(bw get collection "$COLLECTION_PATH" | jq .id) load_env_file_from_bw () { FILE_NAME=$1 BASE_FILE_PATH=$2 NEW_FILE_NAME=$3 - ENV_FILE_PATH=${BASE_FILE_PATH}/${NEW_FILE_NAME}.env - - echo "Fetching environment variables for $FILE_NAME:
$ENV_FILE_PATH" + ENV_FILE_PATH=$BASE_FILE_PATH/$NEW_FILE_NAME.env - echo "Fetching environment variables for $FILE_NAME" + echo -en "${YELLOW}🚚 Fetching variables for ${BOLD}${FILE_NAME}...${RESET}" # checkout deployment specific env vars, or dev as fallback - DEPLOYMENT_ENV_VARS=$(bw list items --search ${FILE_NAME}.${DEPLOYMENT_NAME}.env | jq --raw-output "map(select(.collectionIds[] | contains ($COLLECTION_ID))) | .[] .notes") + DEPLOYMENT_ENV_VARS=$(bw list items --search "$FILE_NAME.$DEPLOYMENT_NAME.env" | jq --raw-output "map(select(.collectionIds[] | contains ($COLLECTION_ID))) | .[] .notes") - - if [ -n "$DEPLOYMENT_ENV_VARS" ]; then - echo "$DEPLOYMENT_ENV_VARS" > ${ENV_FILE_PATH} + if [[ -n $DEPLOYMENT_ENV_VARS ]]; then + echo "$DEPLOYMENT_ENV_VARS" > "$ENV_FILE_PATH" else - DEV_ENV_VARS=$(bw list items --search ${FILE_NAME}.dev.env | jq --raw-output "map(select(.collectionIds[] | contains ($COLLECTION_ID))) | .[] .notes") - echo "$DEV_ENV_VARS" > ${ENV_FILE_PATH} + DEV_ENV_VARS=$(bw list items --search "$FILE_NAME.dev.env" | jq --raw-output "map(select(.collectionIds[] | contains ($COLLECTION_ID))) | .[] .notes") + echo "$DEV_ENV_VARS" > "$ENV_FILE_PATH" fi - # Replace any instances of the placeholder [deployment-name] in the .env file with the actual deployment - # name (e.g. [deployment-name]-api.tupaia.org -> specific-deployment-api.tupaia.org) - sed -i -e "s/\[deployment-name\]/${DEPLOYMENT_NAME}/g" "${ENV_FILE_PATH}" - + # Replace any instances of the placeholder [deployment-name] in the .env file with the actual + # deployment name (e.g. 
[deployment-name]-api.tupaia.org -> specific-deployment-api.tupaia.org) + sed -i -e "s/\[deployment-name\]/$DEPLOYMENT_NAME/g" "$ENV_FILE_PATH" if [[ -v DOMAIN ]]; then # Replace the placeholder [domain] - sed -i -e "s/\[domain\]/${DOMAIN}/g" ${ENV_FILE_PATH} + sed -i -e "s/\[domain\]/$DOMAIN/g" "$ENV_FILE_PATH" fi - if [[ "${DEPLOYMENT_NAME}" == *-e2e || "${DEPLOYMENT_NAME}" == e2e ]]; then + if [[ $DEPLOYMENT_NAME = *-e2e || $DEPLOYMENT_NAME = e2e ]]; then # Update e2e environment variables - if [[ ${FILE_NAME} == "aggregation" ]]; then - sed -i -e 's/^AGGREGATION_URL_PREFIX="?dev-"?$/AGGREGATION_URL_PREFIX=e2e-/g' ${ENV_FILE_PATH} + if [[ $FILE_NAME = aggregation ]]; then + sed -i -e 's/^AGGREGATION_URL_PREFIX="?dev-"?$/AGGREGATION_URL_PREFIX=e2e-/g' "$ENV_FILE_PATH" fi fi - if [[ "${DEPLOYMENT_NAME}" == dev ]]; then + if [[ $DEPLOYMENT_NAME = dev ]]; then # Update dev specific environment variables # (removes ###DEV_ONLY### prefixes, leaving the key=value pair uncommented) # (after removing prefix, if there are duplicate keys, dotenv uses the last one in the file) - sed -i -e 's/^###DEV_ONLY###//g' ${ENV_FILE_PATH} + sed -i -e 's/^###DEV_ONLY###//g' "$ENV_FILE_PATH" fi - - echo "downloaded .env vars for $FILE_NAME" + echo -en "$CLEAR_LINE" + echo -e "${GREEN}✅ Downloaded variables for ${BOLD}${FILE_NAME}${RESET} → $ENV_FILE_PATH" } -for PACKAGE in $PACKAGES; do - # only download the env file if there is an example file in the package. If there isn't, this means it is a package that doesn't need env vars - has_example_env_in_package=$(find $DIR/../../packages/$PACKAGE -type f -name '*.env.example' | wc -l) - if [ $has_example_env_in_package -eq 1 ]; then - load_env_file_from_bw $PACKAGE $DIR/../../packages/$PACKAGE "" +for PACKAGE in "${PACKAGES[@]}"; do + # Only download the env file if there is an example file in the package. 
If there isn’t, this + # means it is a package that doesn’t need env vars + has_example_env_in_package=$(find "$DIR/../../packages/$PACKAGE" -type f -name '*.env.example' | wc -l) + if (( has_example_env_in_package > 0 )); then + load_env_file_from_bw "$PACKAGE" "$DIR/../../packages/$PACKAGE" '' fi done -# get all .env.*.example files in the env directory -file_names=$(find $DIR/../../env -type f -name '*.env.example' -exec basename {} \;) - - -# for each file, get the extract the filename without the .example extension -for file_name in $file_names; do - env_name=$(echo $file_name | sed 's/\.env.example//') - load_env_file_from_bw $env_name $DIR/../../env $env_name +for file_name in "$DIR"/../../env/*.env.example; do + env_name=$(basename "$file_name" .env.example) # Strip path and the .env.example extension + load_env_file_from_bw "$env_name" "$DIR/../../env" "$env_name" done - - +# Log out of Bitwarden +echo +echo -e "${BLUE}==>️${RESET} ${BOLD}Logging out of Bitwarden${RESET}" bw logout diff --git a/scripts/bash/getDeployablePackages.sh b/scripts/bash/getDeployablePackages.sh index ed0d014cd4..c762d1c50d 100755 --- a/scripts/bash/getDeployablePackages.sh +++ b/scripts/bash/getDeployablePackages.sh @@ -1,4 +1,24 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e + +DEPLOYABLE_PACKAGES=( + 'admin-panel' + 'lesmis' + 'psss' + 'datatrak-web' + 'tupaia-web' + 'central-server' + 'data-table-server' + 'datatrak-web-server' + 'entity-server' + 'lesmis-server' + 'meditrak-app-server' + 'psss-server' + 'report-server' + 'tupaia-web-server' + 'web-config-server' + 'admin-panel-server' # admin-panel-server last as it depends on
report-server exit 0 diff --git a/scripts/bash/getInternalDependencies.sh b/scripts/bash/getInternalDependencies.sh index c38095854e..d7941d02db 100755 --- a/scripts/bash/getInternalDependencies.sh +++ b/scripts/bash/getInternalDependencies.sh @@ -1,7 +1,8 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(dirname "$0") -if [ "$1" != "" ]; then +if [[ $1 != '' ]]; then # pop the package_path off, and interpret the rest as dependencies that have been checked earlier # in the recursion package_path=$1 @@ -11,14 +12,36 @@ fi dependencies_already_visited=($@) # if no package.json entrypoint is specified, just return all internal dependencies -if [ -z ${package_path} ]; then - echo "types" "utils" "tsutils" "ui-components" "ui-chart-components" "ui-map-components" "server-utils" "access-policy" "admin-panel" "aggregator" "api-client" "auth" "database" "data-api" "dhis-api" "data-lake-api" "expression-parser" "indicators" "weather-api" "kobo-api" "superset-api" "data-broker" "server-boilerplate" +if [[ -z $package_path ]]; then + echo 'types' \ + 'utils' \ + 'tsutils' \ + 'ui-components' \ + 'ui-chart-components' \ + 'ui-map-components' \ + 'server-utils' \ + 'access-policy' \ + 'admin-panel' \ + 'aggregator' \ + 'api-client' \ + 'auth' \ + 'database' \ + 'data-api' \ + 'dhis-api' \ + 'data-lake-api' \ + 'expression-parser' \ + 'indicators' \ + 'weather-api' \ + 'kobo-api' \ + 'superset-api' \ + 'data-broker' \ + 'server-boilerplate' exit 0 fi # we are getting internal dependencies for a specific package.json internal_dependencies=($(sed -n '/"dependencies": {/,/}/p' ${PWD}/${package_path}/package.json | grep -o '@tupaia/[^"]*": "[0-9\.]*"' | cut -d / -f 2 | cut -d \" -f 1)) -if [ ${#internal_dependencies[@]} -eq 0 ]; then +if (( ${#internal_dependencies[@]} == 0 )); then exit 0 # no internal dependencies of this package, early return fi @@ -37,10 +60,10 @@ internal_dependencies=("${array_without_gaps[@]}") unset array_without_gaps # recursively build up an array of
all internal dependencies this package depends on -for dependency in ${internal_dependencies[@]}; do +for dependency in "${internal_dependencies[@]}"; do nested_dependencies=($(${DIR}/getInternalDependencies.sh "${package_path}/../${dependency}" ${dependencies_already_visited[@]} ${internal_dependencies[@]} )) - if [ ${#nested_dependencies[@]} -eq 0 ]; then + if (( ${#nested_dependencies[@]} == 0 )); then continue fi @@ -49,10 +72,9 @@ for dependency in ${internal_dependencies[@]}; do done # remove any duplicates -deduplicated_union=() for i in "${!internal_dependencies[@]}"; do for j in "${!internal_dependencies[@]}"; do - if [[ i -ne j ]] && [[ ${internal_dependencies[i]} = ${internal_dependencies[j]} ]]; then + if (( i != j )) && [[ ${internal_dependencies[i]} = ${internal_dependencies[j]} ]]; then unset 'internal_dependencies[i]' fi done @@ -64,6 +86,6 @@ internal_dependencies=("${array_without_gaps[@]}") unset array_without_gaps # echo out result for calling script to pick up -echo ${internal_dependencies[@]} +echo "${internal_dependencies[@]}" exit 0 diff --git a/scripts/bash/getPackagesWithEnvFiles.sh b/scripts/bash/getPackagesWithEnvFiles.sh index 88b2038599..b87e874e64 100755 --- a/scripts/bash/getPackagesWithEnvFiles.sh +++ b/scripts/bash/getPackagesWithEnvFiles.sh @@ -1,9 +1,11 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(dirname "$0") -# packages with .env files are (currently) all deployable, plus auth, data-api, and database -PACKAGES=$(${DIR}/getDeployablePackages.sh) -PACKAGES+=" data-api viz-test-tool" -echo $PACKAGES +# Packages with .env files are (currently) all deployable, plus auth, data-api, and database +PACKAGES=$("$DIR/getDeployablePackages.sh") +PACKAGES+=('data-api' 'viz-test-tool') +echo "${PACKAGES[@]}" + exit 0 diff --git a/scripts/bash/mergeEnvForDB.sh b/scripts/bash/mergeEnvForDB.sh index b8db6343c2..138a9be173 100755 --- a/scripts/bash/mergeEnvForDB.sh +++ b/scripts/bash/mergeEnvForDB.sh @@ -1,10 +1,11 @@ -#!/bin/bash -e
+#!/usr/bin/env bash +set -e # Function to get the directory of the package that's calling this script get_caller_package_directory() { local dir dir=$(dirname "$(readlink -f "$0")") - while [[ "$dir" != "/" ]]; do + while [[ $dir != '/' ]]; do if [[ -f "$dir/package.json" ]]; then echo "$dir" return @@ -20,30 +21,30 @@ CALLING_SCRIPT_DIR=$(get_caller_package_directory) CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" # Fixed paths to the .env files for the test db -file1="$CURRENT_DIR/../../env/db.env" -file2="$CURRENT_DIR/../../env/pg.env" -file3="$CURRENT_DIR/../../env/data-lake.env" -file4="$CALLING_SCRIPT_DIR/.env" - -common_files="$file1 $file2 $file3 $file4" - - # Remove files that don't exist -for file in $common_files; do - if [ ! -f "$file" ]; then - common_files=$(echo "$common_files" | sed "s|$file||g") - fi -done - -# Load environment variables from .env files -merged_content="$(cat $common_files)" +common_files=( + "$CURRENT_DIR/../../env/db.env" + "$CURRENT_DIR/../../env/pg.env" + "$CURRENT_DIR/../../env/data-lake.env" + "$CALLING_SCRIPT_DIR/.env" +) + +# Load environment variables from .env files (if they exist) +merged_content=$( + for file in "${common_files[@]}"; do + if [[ -f $file ]]; then + cat "$file" + fi + done ) # Process command line arguments, overwriting values if present for var in $(env); do - if [[ "$var" == *=* ]]; then + if [[ $var = *=* ]]; then key="${var%%=*}" value="${var#*=}" - # Override values from command line - merged_content+=" $key=\"$value\"" + # Override values from command line, with each variable on its own line + merged_content+=" +$key=\"$value\"" fi done diff --git a/scripts/bash/pm2startInline.sh b/scripts/bash/pm2startInline.sh index 5a4cf4db81..dffc6b2e43 100755 --- a/scripts/bash/pm2startInline.sh +++ b/scripts/bash/pm2startInline.sh @@ -1,15 +1,17 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e cd "$(dirname "${BASH_SOURCE[0]}")" +. 
./ansiControlSequences.sh -if [ -z "$1" ]; then - echo -e "Usage: \033[1myarn start-stack\033[0m \033[4mstack\033[0m" - echo "" - echo -e "All \033[4mstack\033[0ms:" - ls -1 ../../packages/devops/configs/pm2/ | sed 's|.config.js||g' | grep -v 'base' | awk '$0=" "$0' - echo "" +if [[ -z $1 ]]; then + echo -e "Usage: ${BOLD}yarn start-stack${RESET} ${UNDERLINE}stack${RESET}" + echo + echo -e "All ${UNDERLINE}stack${RESET}s:" + ls -1 ../../packages/devops/configs/pm2/ | sed 's|.config.js||g' | grep -v 'base' | awk '$0=" "$0' + echo echo "Tips:" - echo -e " - Normal PM2 commands work e.g. \033[1myarn pm2 status\033[0m" + echo -e " - Normal PM2 commands work e.g. ${BOLD}yarn pm2 status${RESET}" echo " - Start multiple stacks by calling this command multiple times" exit 1 fi @@ -17,6 +19,6 @@ fi yarn pm2 start "../../packages/devops/configs/pm2/$1.config.js" # When user quits logs, stop everything -trap "echo -e '\n\033[1;41;97m Stopping... \033[0m' && yarn pm2 delete all" EXIT +trap 'echo -e "\n${RED}[start-stack]${RESET} Stopping..." && yarn pm2 delete all' EXIT yarn pm2 logs --lines 0 diff --git a/scripts/bash/validate.sh b/scripts/bash/validate.sh index 3e961366fa..137af302a0 100755 --- a/scripts/bash/validate.sh +++ b/scripts/bash/validate.sh @@ -1,7 +1,8 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e DIR=$(dirname "$0") -. ${DIR}/../../packages/devops/scripts/ci/utils.sh +. 
"$DIR/../../packages/devops/scripts/ci/utils.sh" yarn workspace @tupaia/devops validate-branch-name yarn workspace @tupaia/devops validate-tests diff --git a/scripts/docker/downloadEnvironmentVariables.sh b/scripts/docker/downloadEnvironmentVariables.sh index a10db05dc1..99ce7c7564 100755 --- a/scripts/docker/downloadEnvironmentVariables.sh +++ b/scripts/docker/downloadEnvironmentVariables.sh @@ -1,4 +1,5 @@ -#!/bin/bash -e +#!/usr/bin/env bash +set -e # Fetch environment from LastPass, write to ${ENV_DEST}/${DEPLOYMENT_NAME}/.env # Is modified from ../../scripts/bash/downloadEnvironmentVariables.sh @@ -33,8 +34,8 @@ DIR=$(dirname "$0") # can provide one or more packages as command line arguments, or will default to all -if [ -z $3 ]; then - echo "Fetching all .env files" +if [[ -z $3 ]]; then + echo 'Fetching all .env files' PACKAGES=$(${DIR}/../bash/getPackagesWithEnvFiles.sh) else PACKAGES=${@:3} @@ -54,14 +55,14 @@ for PACKAGE in $PACKAGES; do # name (e.g. [deployment-name]-api.tupaia.org -> specific-deployment-api.tupaia.org) sed -i -e "s/\[deployment-name\]/${DEPLOYMENT_NAME}/g" ${ENV_FILE_PATH} - if [[ "${DEPLOYMENT_NAME}" == *-e2e || "${DEPLOYMENT_NAME}" == e2e ]]; then + if [[ $DEPLOYMENT_NAME = *-e2e || $DEPLOYMENT_NAME = e2e ]]; then # Update e2e environment variables - if [[ ${PACKAGE} == "central-server" || ${PACKAGE} == "web-config-server" ]]; then + if [[ $PACKAGE = central-server || $PACKAGE = web-config-server ]]; then sed -i -E 's/^AGGREGATION_URL_PREFIX="?dev-"?$/AGGREGATION_URL_PREFIX=e2e-/g' ${ENV_FILE_PATH} fi fi - if [[ "${DEPLOYMENT_NAME}" == dev ]]; then + if [[ $DEPLOYMENT_NAME = 'dev' ]]; then # Update dev specific environment variables # (removes ###DEV_ONLY### prefixes, leaving the key=value pair uncommented) # (after removing prefix, if there are duplicate keys, dotenv uses the last one in the file)