diff --git a/.crux_dry_run_build b/.crux_dry_run_build deleted file mode 100644 index 7256109b..00000000 --- a/.crux_dry_run_build +++ /dev/null @@ -1,2 +0,0 @@ -AUTOBUILD - diff --git a/.github/actions/baseAction/action.yml b/.github/actions/baseAction/action.yml new file mode 100644 index 00000000..2f89d3c8 --- /dev/null +++ b/.github/actions/baseAction/action.yml @@ -0,0 +1,53 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# + +name: Base Action +description: Generic base action used by workflows - Cache rush and pnpm; rush install, check and build +  +runs: + using: "composite" + steps: + - name: Use Node.js 18.x + uses: actions/setup-node@v3 + with: + node-version: '18.x' + + - name: Cache Rush + uses: actions/cache@v3 + with: + path: | + common/temp/install-run + ~/.rush + key: ${{ runner.os }}-rush-${{ hashFiles('rush.json') }} + restore-keys: | + ${{ runner.os }}-rush- + ${{ runner.os }}- + + - name: Cache pnpm + uses: actions/cache@v3 + with: + path: | + common/temp/pnpm-store + key: ${{ runner.os }}-pnpm-${{ hashFiles('common/config/rush/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm- + ${{ runner.os }}- + + - name: install pnpm + uses: pnpm/action-setup@v2.2.2 + with: + version: 6.32 + + - name: Install dependencies + shell: bash + run: node common/scripts/install-run-rush.js install + + - name: Rush check + shell: bash + run: node common/scripts/install-run-rush.js check + + - name: Rush build + shell: bash + run: node common/scripts/install-run-rush.js build --verbose \ No newline at end of file diff --git a/.github/workflows/branch-protection.yml b/.github/workflows/branch-protection.yml new file mode 100644 index 00000000..1be90a45 --- /dev/null +++ b/.github/workflows/branch-protection.yml @@ -0,0 +1,30 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +# + +name: Branch protection +on: + pull_request: + types: [opened, synchronize, edited, reopened] +# The purpose of this workflow is to create a failing Status check on pull request against develop. This will prevent +# PR from being merged into main/stage. +jobs: + main-protection: + name: Only create PR against develop branch, not main or stage branch + runs-on: ubuntu-20.04 + steps: + - name: Use Node.js 18.x + uses: actions/setup-node@v3 + with: + node-version: '18.x' + - name: Get branch name + id: branch-name + uses: tj-actions/branch-names@v5.1 + - name: main protection + if: | + (steps.branch-name.outputs.base_ref_branch == 'main' && startsWith(steps.branch-name.outputs.head_ref_branch, 'release/') == false) || + (steps.branch-name.outputs.base_ref_branch == 'stage') + run: | + echo "PR has target branch ${{ steps.branch-name.outputs.base_ref_branch }}. Failing workflow..." + exit 1 \ No newline at end of file diff --git a/.github/workflows/build-test-coverage.yml b/.github/workflows/build-test-coverage.yml new file mode 100644 index 00000000..ead9edc0 --- /dev/null +++ b/.github/workflows/build-test-coverage.yml @@ -0,0 +1,23 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# + +name: Build, test, and test-coverage +on: ['pull_request'] + +jobs: + build-test-coverage: + runs-on: ubuntu-20.04 + steps: + - name: Checkout repository + uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Base Action + uses: ./.github/actions/baseAction + + - name: Rush test + run: node common/scripts/install-run-rush.js test --verbose \ No newline at end of file diff --git a/.github/workflows/cfn-analysis.yml b/.github/workflows/cfn-analysis.yml new file mode 100644 index 00000000..764521ab --- /dev/null +++ b/.github/workflows/cfn-analysis.yml @@ -0,0 +1,73 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# + +name: CloudFormation Scanning + +on: + pull_request: + branches: + - develop + +jobs: + cfn-analyze: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Base Action + uses: ./.github/actions/baseAction + + - name: Build Hapi validator + run: | + cd ./fwoa-utilities/javaHapiValidatorLambda + mvn --batch-mode --update-snapshots --no-transfer-progress clean install + + # for compile igs we would need to use node v16 due to ERR_PACKAGE_PATH_NOT_EXPORTED in elasticsearch package + - name: Use Node.js 16.x + uses: actions/setup-node@v3 + with: + node-version: '16.x' + + - name: Download US Core IG + # NOTE if updating the IG version. Please see update implementationGuides.test.ts test too. + run: | + cd ./solutions/deployment + mkdir -p implementationGuides + curl http://hl7.org/fhir/us/core/STU3.1.1/package.tgz | tar xz -C implementationGuides + cd ../smart-deployment + mkdir -p implementationGuides + curl http://hl7.org/fhir/us/core/STU3.1.1/package.tgz | tar xz -C implementationGuides + + - name: Compile IGs + run: | + npm install -g ts-node + cd ./solutions/deployment + node ../../common/scripts/install-run-rushx.js compile-igs + cd ../smart-deployment + node ../../common/scripts/install-run-rushx.js compile-igs + + - name: synthesize deployment and smart cdk template for cfn for analysis + run: | + cd ./solutions/deployment + node ../../common/scripts/install-run-rushx.js cdk synth -c enableSubscriptions=true -c useHapiValidator=true --all + + cd ../smart-deployment + node ../../common/scripts/install-run-rushx.js cdk synth -c issuerEndpoint='test' -c oAuth2ApiEndpoint='test' -c patientPickerEndpoint='test' -c enableSubscriptions=true -c useHapiValidator=true --all + + - name: cfn_nag on smart cdk template + uses: stelligent/cfn_nag@master + with: + input_path: ./solutions/smart-deployment/cdk.out/smart-fhir-service-dev.template.json + extra_args: 
--blacklist-path ./solutions/smart-deployment/cdk-nag-deny-list.yaml --fail-on-warnings + + - name: cfn_nag on fwoa cdk template + uses: stelligent/cfn_nag@master + with: + input_path: ./solutions/deployment/cdk.out/fhir-service-dev.template.json + extra_args: --blacklist-path ./solutions/deployment/cdk-nag-deny-list.yaml --fail-on-warnings + + + diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000..9fa0247f --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,31 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# + +name: Security Code Scanning + +on: + pull_request: + paths-ignore: + - '**/*.md' + - '**/*.txt' + schedule: + - cron: '0 0 * * *' + +jobs: + CodeQL-Analyze: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: javascript + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml new file mode 100644 index 00000000..81840c56 --- /dev/null +++ b/.github/workflows/create-release.yml @@ -0,0 +1,124 @@ +name: create-release +on: workflow_dispatch +env: + BOT_USER_EMAIL: ${{ vars.BOT_USER_EMAIL }} + BOT_USER_NAME: ${{ vars.BOT_USER_NAME }} +jobs: + create-release-branch: + if: ${{github.ref_name == 'stage'}} + runs-on: ubuntu-20.04 + outputs: + source_branch: ${{ steps.create_branch.outputs.source_branch }} + steps: + - uses: actions-cool/check-user-permission@v2 + with: + require: 'admin' + username: ${{ github.triggering_actor }} + - name: Use Node.js 18.x + uses: actions/setup-node@v3 + with: + node-version: '18.x' + - uses: actions/checkout@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + fetch-depth: 0 + - name: Set git 
config + run: | + git config user.email $BOT_USER_EMAIL + git config user.name $BOT_USER_NAME + - name: create release branch + id: create_branch + run: | + echo "Create UUID" + uuid=`uuidgen` + echo 'Creating release branch' + git checkout -b release/$uuid + git push -u origin release/$uuid + + echo 'Rush version to bump updates' + node common/scripts/install-run-rush.js version --bump -b release/$uuid --ignore-git-hooks + echo "source_branch=release/$uuid" >> $GITHUB_OUTPUT + + create-release-branch-pr-to-main: + runs-on: ubuntu-20.04 + needs: create-release-branch + outputs: + pr_number: ${{ steps.open-pr.outputs.pr_number }} + steps: + - name: Get current date + id: date + run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT + + - uses: actions/checkout@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + fetch-depth: 0 + + - name: create-pull-request + id: open-pr + uses: repo-sync/pull-request@v2 + with: + source_branch: ${{ needs.create-release-branch.outputs.source_branch }} + destination_branch: main + pr_title: release-${{ steps.date.outputs.date }} + pr_template: ".github/PULL_REQUEST_TEMPLATE.md" + pr_label: "auto-release-pr" + pr_allow_empty: false + github_token: ${{ secrets.GITHUB_TOKEN }} + + merge-release-pr-to-main: + runs-on: ubuntu-20.04 + needs: create-release-branch-pr-to-main + steps: + - name: enable merge commits + uses: octokit/request-action@v2.x + with: + route: PATCH /repos/{owner}/{repo} + owner: aws-solutions + repo: fhir-works-on-aws + allow_merge_commit: true + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: Temporarily disable branch protection on main + uses: octokit/request-action@v2.x + with: + route: DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins + owner: aws-solutions + repo: fhir-works-on-aws + branch: main + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: merge release branch pr to main + uses: octokit/request-action@v2.x + with: + route: PUT 
/repos/{owner}/{repo}/pulls/{pull_number}/merge + owner: aws-solutions + repo: fhir-works-on-aws + pull_number: ${{ needs.create-release-branch-pr-to-main.outputs.pr_number }} + merge_method: 'merge' + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: disable merge commits + uses: octokit/request-action@v2.x + if: always() # Make sure to enable branch protection even if other steps fail + with: + route: PATCH /repos/{owner}/{repo} + owner: aws-solutions + repo: fhir-works-on-aws + allow_merge_commit: false + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: Enable branch protection on main + uses: octokit/request-action@v2.x + if: always() # Make sure to enable branch protection even if other steps fail + with: + route: POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins + owner: aws-solutions + repo: fhir-works-on-aws + branch: main + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} diff --git a/.github/workflows/deploy-smart.yaml b/.github/workflows/deploy-smart.yaml new file mode 100644 index 00000000..dc10c512 --- /dev/null +++ b/.github/workflows/deploy-smart.yaml @@ -0,0 +1,213 @@ +name: (SMART) Unit Tests, Deploy, Integration Test +on: + workflow_call: + +jobs: + build-validate: + name: Build and validate + runs-on: ubuntu-20.04 + steps: + - name: Checkout + uses: actions/checkout@v3 + + # install dependencies, build, and test + - name: Base Action + uses: ./.github/actions/baseAction + + - name: Rush test + run: node common/scripts/install-run-rush.js test --verbose + - uses: actions/setup-java@v1 + with: + java-version: 1.8 + - name: Build Hapi validator + run: | + cd ./fwoa-utilities/javaHapiValidatorLambda + mvn --batch-mode --update-snapshots --no-transfer-progress clean install + deploy: + needs: build-validate + name: Deploy to Dev - enableMultiTenancy=${{ matrix.enableMultiTenancy }} + environment: FWoA Integ Test Env + runs-on: ubuntu-20.04 + strategy: + matrix: + include: + - enableMultiTenancy: false + region: 
us-east-1 + issuerEndpointSecretName: SMART_ISSUER_ENDPOINT + oAuth2ApiEndpointSecretName: SMART_OAUTH2_API_ENDPOINT + patientPickerEndpointSecretName: SMART_PATIENT_PICKER_ENDPOINT + - enableMultiTenancy: true + region: us-east-2 + issuerEndpointSecretName: MULTITENANCY_SMART_ISSUER_ENDPOINT + oAuth2ApiEndpointSecretName: MULTITENANCY_SMART_OAUTH2_API_ENDPOINT + patientPickerEndpointSecretName: MULTITENANCY_SMART_PATIENT_PICKER_ENDPOINT + # These permissions are needed to interact with GitHub's OIDC Token endpoint. + permissions: + id-token: write + contents: read + steps: + - name: Checkout + uses: actions/checkout@v3 + - uses: actions/setup-java@v1 + with: + java-version: 1.8 + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: ${{ matrix.region }} + role-to-assume: ${{ secrets.AWS_ACCESS_ROLE_ARN }} + role-duration-seconds: 7200 + - name: Base Action + uses: ./.github/actions/baseAction + + # for compile igs we would need to use node v16 due to ERR_PACKAGE_PATH_NOT_EXPORTED in elasticsearch package + - name: Use Node.js 16.x + uses: actions/setup-node@v3 + with: + node-version: '16.x' + - name: Download US Core IG + # NOTE if updating the IG version. Please see update implementationGuides.test.ts test too. 
+ run: | + cd ./solutions/smart-deployment + mkdir -p implementationGuides + curl http://hl7.org/fhir/us/core/STU3.1.1/package.tgz | tar xz -C implementationGuides + - name: Compile IGs + run: | + npm install -g ts-node + cd ./solutions/smart-deployment + node ../../common/scripts/install-run-rushx.js compile-igs + - name: Setup allowList for Subscriptions integ tests + run: cp solutions/smart-deployment/src/integration-tests/infrastructure/allowList-integTests.ts solutions/smart-deployment/src/subscriptions/allowList.ts + - name: Build Hapi validator + run: | + cd ./fwoa-utilities/javaHapiValidatorLambda + mvn --batch-mode --update-snapshots --no-transfer-progress clean install + + - name: Deploy FWoA with CDK + run: | + cd ./solutions/smart-deployment + node ../../common/scripts/install-run-rushx.js deploy -c issuerEndpoint=${{ secrets[matrix.issuerEndpointSecretName] }} -c oAuth2ApiEndpoint=${{ secrets[matrix.oAuth2ApiEndpointSecretName] }} -c patientPickerEndpoint=${{ secrets[matrix.patientPickerEndpointSecretName] }} -c enableSubscriptions=true -c useHapiValidator=true -c region=${{ matrix.region }} -c enableMultiTenancy=${{ matrix.enableMultiTenancy }} --all --require-approval=never + inferno-test: + needs: deploy + name: Run Inferno Tests - enableMultiTenancy=${{ matrix.enableMultiTenancy }} + runs-on: ubuntu-20.04 + strategy: + fail-fast: false + matrix: + include: + - enableMultiTenancy: false + region: us-east-1 + serviceUrlSuffix: '' + smartServiceURLSecretName: SMART_SERVICE_URL + - enableMultiTenancy: true + region: us-east-2 + serviceUrlSuffix: /tenant/tenant1 + smartServiceURLSecretName: MULTITENANCY_SMART_SERVICE_URL + permissions: + id-token: write + contents: read + steps: + - uses: actions/checkout@v3 + with: + repository: nguyen102/inferno + ref: fhir-works + - uses: actions/setup-ruby@v1 + with: + ruby-version: '2.6' + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: ${{ matrix.region }} + 
role-to-assume: ${{ secrets.AWS_ACCESS_ROLE_ARN }} + role-duration-seconds: 7200 + - name: Install dependency + run: | + gem install bundler + bundle install + - name: Execute Inferno tests + env: + SERVICE_URL: ${{ secrets[matrix.smartServiceURLSecretName]}}${{ matrix.serviceUrlSuffix }} + CLIENT_ID: ${{ secrets.SMART_AUTH_CLIENT_ID}} + CLIENT_SECRET: ${{ secrets.SMART_AUTH_CLIENT_SECRET }} + AUTH_ENDPOINT: ${{ secrets.SMART_AUTH_ENDPOINT }} + TOKEN_ENDPOINT: ${{ secrets.SMART_TOKEN_ENDPOINT}} + AUTH_USERNAME: ${{ secrets.SMART_AUTH_USERNAME}} + AUTH_PASSWORD: ${{ secrets.SMART_AUTH_PASSWORD}} + run: | + cp fhir-works-example.json fhir-works.json + sed -i -e "s#SERVER_ENDPOINT#$SERVICE_URL#g" fhir-works.json + sed -i -e "s#CLIENT_ID#$CLIENT_ID#g" fhir-works.json + sed -i -e "s#CLIENT_SECRET#$CLIENT_SECRET#g" fhir-works.json + sed -i -e "s#AUTH_ENDPOINT#$AUTH_ENDPOINT#g" fhir-works.json + sed -i -e "s#TOKEN_ENDPOINT#$TOKEN_ENDPOINT#g" fhir-works.json + sed -i -e "s#AUTH_USERNAME#$AUTH_USERNAME#g" fhir-works.json + sed -i -e "s#AUTH_PASSWORD#$AUTH_PASSWORD#g" fhir-works.json + sed -i -e "s/okta-signin-username/input28/g" fhir-works.json + sed -i -e "s/okta-signin-password/input36/g" fhir-works.json + sed -i -e "s/okta-signin-submit/button-primary/g" fhir-works.json + sed -i -e "14 s/id/class/" fhir-works.json + bundle exec rake db:create db:schema:load + bundle exec rake inferno:execute_batch[fhir-works.json] + custom-integration-tests: + needs: inferno-test + name: Run custom integration tests - enableMultiTenancy=${{ matrix.enableMultiTenancy }} + environment: FWoA Integ Test Env + runs-on: ubuntu-20.04 + strategy: + matrix: + include: + - enableMultiTenancy: false + region: us-east-1 + smartOauth2ApiEndpointSecretName: SMART_OAUTH2_API_ENDPOINT + smartAuthUsernameSecretName: SMART_AUTH_USERNAME + smartAuthAdminUsernameSecretName: SMART_AUTH_ADMIN_USERNAME + smartServiceURLSecretName: SMART_SERVICE_URL + smartApiKeySecretName: SMART_API_KEY + 
subscriptionsNotificationsTableSecretName: SMART_SUBSCRIPTIONS_NOTIFICATIONS_TABLE + subscriptionsEndpointSecretName: SMART_SUBSCRIPTIONS_ENDPOINT + subscriptionsApiKeySecretName: SMART_SUBSCRIPTIONS_API_KEY + - enableMultiTenancy: true + region: us-east-2 + smartOauth2ApiEndpointSecretName: MULTITENANCY_SMART_OAUTH2_API_ENDPOINT + smartAuthUsernameSecretName: MULTITENANCY_SMART_AUTH_USERNAME + smartAuthAdminUsernameSecretName: MULTITENANCY_SMART_AUTH_ADMIN_USERNAME + smartServiceURLSecretName: MULTITENANCY_SMART_SERVICE_URL + smartApiKeySecretName: MULTITENANCY_SMART_API_KEY + subscriptionsNotificationsTableSecretName: MULTITENANCY_SMART_SUBSCRIPTIONS_NOTIFICATIONS_TABLE + subscriptionsEndpointSecretName: MULTITENANCY_SMART_SUBSCRIPTIONS_ENDPOINT + subscriptionsApiKeySecretName: MULTITENANCY_SMART_SUBSCRIPTIONS_API_KEY + permissions: + id-token: write + contents: read + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: ${{ matrix.region }} + role-to-assume: ${{ secrets.AWS_ACCESS_ROLE_ARN }} + role-duration-seconds: 7200 + - name: Base Action + uses: ./.github/actions/baseAction + - name: Execute tests + env: + SMART_OAUTH2_API_ENDPOINT: ${{ secrets[matrix.smartOauth2ApiEndpointSecretName] }} + SMART_INTEGRATION_TEST_CLIENT_ID: ${{ secrets.SMART_INTEGRATION_TEST_CLIENT_ID}} + SMART_INTEGRATION_TEST_CLIENT_PW: ${{ secrets.SMART_INTEGRATION_TEST_CLIENT_PW}} + SMART_AUTH_USERNAME: ${{ secrets[matrix.smartAuthUsernameSecretName] }} + SMART_AUTH_ADMIN_USERNAME: ${{ secrets[matrix.smartAuthAdminUsernameSecretName] }} + SMART_AUTH_ADMIN_ANOTHER_TENANT_USERNAME: ${{ secrets.SMART_AUTH_ADMIN_ANOTHER_TENANT_USERNAME}} + SMART_AUTH_PASSWORD: ${{ secrets.SMART_AUTH_PASSWORD}} + SMART_SERVICE_URL: ${{ secrets[matrix.smartServiceURLSecretName] }} + SMART_API_KEY: ${{ secrets[matrix.smartApiKeySecretName] }} + MULTI_TENANCY_ENABLED: ${{ matrix.enableMultiTenancy }} + 
SUBSCRIPTIONS_ENABLED: 'true' + SUBSCRIPTIONS_NOTIFICATIONS_TABLE: ${{ secrets[matrix.subscriptionsNotificationsTableSecretName] }} + SUBSCRIPTIONS_ENDPOINT: ${{ secrets[matrix.subscriptionsEndpointSecretName] }} + SUBSCRIPTIONS_API_KEY: ${{ secrets[matrix.subscriptionsApiKeySecretName] }} + AWS_REGION: ${{ matrix.region }} + API_AWS_REGION: ${{ matrix.region }} + run: | + cd ./solutions/smart-deployment + node ../../common/scripts/install-run-rushx.js int-test diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml new file mode 100644 index 00000000..2d7abfb2 --- /dev/null +++ b/.github/workflows/deploy.yaml @@ -0,0 +1,210 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# + +name: Unit Tests, Deploy, Integration Test +on: + workflow_call: + +jobs: + build-validate: + name: Build and validate + runs-on: ubuntu-20.04 + steps: + - name: Checkout + uses: actions/checkout@v3 + + # install dependencies, build, and test + - name: Base Action + uses: ./.github/actions/baseAction + + - name: Rush test + run: node common/scripts/install-run-rush.js test --verbose + + - uses: actions/setup-java@v1 + with: + java-version: 1.8 + + - name: Build Hapi validator + run: | + cd ./fwoa-utilities/javaHapiValidatorLambda + mvn --batch-mode --update-snapshots --no-transfer-progress clean install + deploy: + needs: build-validate + name: Deploy to Dev - enableMultiTenancy=${{ matrix.enableMultiTenancy }} + runs-on: ubuntu-20.04 + strategy: + matrix: + include: + - enableMultiTenancy: false + region: us-west-1 + - enableMultiTenancy: true + region: us-west-2 + # These permissions are needed to interact with GitHub's OIDC Token endpoint. 
+ permissions: + id-token: write + contents: read + steps: + - name: Checkout + uses: actions/checkout@v3 + + - uses: actions/setup-java@v1 + with: + java-version: 1.8 + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: ${{ matrix.region }} + role-to-assume: ${{ secrets.AWS_ACCESS_ROLE_ARN }} + role-duration-seconds: 7200 + + - name: Base Action + uses: ./.github/actions/baseAction + + # for compile igs we would need to use node v16 due to ERR_PACKAGE_PATH_NOT_EXPORTED in elasticsearch package + - name: Use Node.js 16.x + uses: actions/setup-node@v3 + with: + node-version: '16.x' + - name: Download US Core IG + # NOTE if updating the IG version. Please see update implementationGuides.test.ts test too. + run: | + cd ./solutions/deployment + mkdir -p implementationGuides + curl http://hl7.org/fhir/us/core/STU3.1.1/package.tgz | tar xz -C implementationGuides + - name: Compile IGs + run: | + npm install -g ts-node + cd ./solutions/deployment + node ../../common/scripts/install-run-rushx.js compile-igs + + - name: Use Node.js 18.x + uses: actions/setup-node@v3 + with: + node-version: '18.x' + + - name: Setup allowList for Subscriptions integ tests + run: cp solutions/deployment/src/integration-tests/infrastructure/allowList-integTests.ts solutions/deployment/src/subscriptions/allowList.ts + + - name: Deploy Hapi validator + run: | + cd ./fwoa-utilities/javaHapiValidatorLambda + mvn --batch-mode --update-snapshots --no-transfer-progress clean install + - name: Deploy FHIR Server and Hapi Validator with CDK + run: | + cd ./solutions/deployment + node ../../common/scripts/install-run-rushx.js deploy -c region=${{ matrix.region }} -c useHapiValidator=true -c enableMultiTenancy=${{ matrix.enableMultiTenancy }} -c enableSubscriptions=true --all --require-approval never + crucible-test: + needs: deploy + name: Run Crucible Tests - enableMultiTenancy=${{ matrix.enableMultiTenancy }} + runs-on: ubuntu-20.04 + strategy: + 
matrix: + include: + - enableMultiTenancy: false + region: us-west-1 + serviceUrlSuffix: '' + serviceUrlSecretName: SERVICE_URL + cognitoClientIdSecretName: COGNITO_CLIENT_ID + apiKeySecretName: API_KEY + - enableMultiTenancy: true + region: us-west-2 + serviceUrlSuffix: /tenant/tenant1 + serviceUrlSecretName: MULTITENANCY_SERVICE_URL + cognitoClientIdSecretName: MULTITENANCY_COGNITO_CLIENT_ID + apiKeySecretName: MULTITENANCY_API_KEY + permissions: + id-token: write + contents: read + steps: + - uses: actions/checkout@v3 + with: + repository: nguyen102/plan_executor + ref: r4-aws-fhir-solution + - uses: actions/setup-ruby@v1 + with: + ruby-version: '2.6' + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: ${{ matrix.region }} + role-to-assume: ${{ secrets.AWS_ACCESS_ROLE_ARN }} + role-duration-seconds: 7200 + - name: Install dependency + run: | + gem install bundler + bundle install + - name: Execute tests + env: + SERVICE_URL: ${{ secrets[matrix.serviceUrlSecretName] }}${{ matrix.serviceUrlSuffix }} + API_KEY: ${{ secrets[matrix.apiKeySecretName] }} + COGNITO_CLIENT_ID: ${{ secrets[matrix.cognitoClientIdSecretName] }} + COGNITO_USERNAME: ${{ secrets.COGNITO_USERNAME_PRACTITIONER }} + COGNITO_PASSWORD: ${{ secrets.COGNITO_PASSWORD }} + run: | + ACCESS_TOKEN=$(aws cognito-idp initiate-auth --region ${{ matrix.region }} --client-id $COGNITO_CLIENT_ID \ + --auth-flow USER_PASSWORD_AUTH --auth-parameters USERNAME=$COGNITO_USERNAME,PASSWORD=$COGNITO_PASSWORD | \ + python3 -c 'import json,sys;obj=json.load(sys.stdin);print(obj["AuthenticationResult"]["IdToken"])') + echo User authenticated + bundle exec rake crucible:execute_hearth_tests[$SERVICE_URL,$API_KEY,$ACCESS_TOKEN] + custom-integration-tests: + needs: crucible-test + name: Run custom integration tests - enableMultiTenancy=${{ matrix.enableMultiTenancy }} + runs-on: ubuntu-20.04 + strategy: + matrix: + include: + - enableMultiTenancy: false + region: 
us-west-1 + serviceUrlSecretName: SERVICE_URL + cognitoClientIdSecretName: COGNITO_CLIENT_ID + apiKeySecretName: API_KEY + subscriptionsNotificationsTableSecretName: SUBSCRIPTIONS_NOTIFICATIONS_TABLE + subscriptionsEndpointSecretName: SUBSCRIPTIONS_ENDPOINT + subscriptionsApiKeySecretName: SUBSCRIPTIONS_API_KEY + - enableMultiTenancy: true + region: us-west-2 + serviceUrlSecretName: MULTITENANCY_SERVICE_URL + cognitoClientIdSecretName: MULTITENANCY_COGNITO_CLIENT_ID + apiKeySecretName: MULTITENANCY_API_KEY + subscriptionsNotificationsTableSecretName: MULTITENANCY_SUBSCRIPTIONS_NOTIFICATIONS_TABLE + subscriptionsEndpointSecretName: MULTITENANCY_SUBSCRIPTIONS_ENDPOINT + subscriptionsApiKeySecretName: MULTITENANCY_SUBSCRIPTIONS_API_KEY + permissions: + id-token: write + contents: read + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: ${{ matrix.region }} + role-to-assume: ${{ secrets.AWS_ACCESS_ROLE_ARN }} + role-duration-seconds: 7200 + + - name: Base Action + uses: ./.github/actions/baseAction + + - name: Execute tests + env: + API_URL: ${{ secrets[matrix.serviceUrlSecretName] }} + API_KEY: ${{ secrets[matrix.apiKeySecretName] }} + API_AWS_REGION: ${{ matrix.region }} + COGNITO_CLIENT_ID: ${{ secrets[matrix.cognitoClientIdSecretName] }} + COGNITO_USERNAME_PRACTITIONER: ${{ secrets.COGNITO_USERNAME_PRACTITIONER }} + COGNITO_USERNAME_AUDITOR: ${{ secrets.COGNITO_USERNAME_AUDITOR }} + COGNITO_USERNAME_PRACTITIONER_ANOTHER_TENANT: ${{ secrets.COGNITO_USERNAME_PRACTITIONER_ANOTHER_TENANT }} + COGNITO_PASSWORD: ${{ secrets.COGNITO_PASSWORD }} + MULTI_TENANCY_ENABLED: ${{ matrix.enableMultiTenancy }} + SUBSCRIPTIONS_ENABLED: 'true' + SUBSCRIPTIONS_NOTIFICATIONS_TABLE: ${{ secrets[matrix.subscriptionsNotificationsTableSecretName] }} + SUBSCRIPTIONS_ENDPOINT: ${{ secrets[matrix.subscriptionsEndpointSecretName] }} + SUBSCRIPTIONS_API_KEY: ${{ 
secrets[matrix.subscriptionsApiKeySecretName] }} + AWS_REGION: ${{ matrix.region }} + run: | + cd ./solutions/deployment + node ../../common/scripts/install-run-rushx.js int-test diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 00000000..2c55d0c6 --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,21 @@ +name: labeler + +on: [pull_request] + +jobs: + labeler: + runs-on: ubuntu-latest + name: Label the PR size + steps: + - uses: codelytv/pr-size-labeler@v1 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + xs_max_size: '10' + s_max_size: '100' + m_max_size: '500' + l_max_size: '1000' + fail_if_xl: 'false' + message_if_xl: > + 'This PR exceeds the recommended size of 1000 lines. + Please make sure you are NOT addressing multiple issues with one PR. + Note this PR might be rejected due to its size.' diff --git a/.github/workflows/lint-pr.yml b/.github/workflows/lint-pr.yml new file mode 100644 index 00000000..66c7c8b3 --- /dev/null +++ b/.github/workflows/lint-pr.yml @@ -0,0 +1,14 @@ +name: "Lint-pr" + +on: + pull_request: + types: [opened, synchronize, edited, reopened] + +jobs: + lint-pr: + name: Validate PR title + runs-on: ubuntu-20.04 + steps: + - uses: amannn/action-semantic-pull-request@v4 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/merge-develop-to-stage.yml b/.github/workflows/merge-develop-to-stage.yml new file mode 100644 index 00000000..58daa57f --- /dev/null +++ b/.github/workflows/merge-develop-to-stage.yml @@ -0,0 +1,120 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +# + +name: Merge develop to stage +on: + push: + branches: + - develop + +jobs: + pre-deployment-check: + name: Pre deployment check + runs-on: ubuntu-20.04 + timeout-minutes: 120 + steps: + - name: Use Node.js 18.x + uses: actions/setup-node@v3 + with: + node-version: '18.x' + - name: Block Concurrent Deployments + uses: softprops/turnstyle@v1 + with: + poll-interval-seconds: 10 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + deploy-fhir: + needs: pre-deployment-check + uses: + ./.github/workflows/deploy.yaml + secrets: inherit + deploy-smart-fhir: + needs: pre-deployment-check + uses: + ./.github/workflows/deploy-smart.yaml + secrets: inherit + merge-develop-to-stage: + name: Merge develop to stage + runs-on: ubuntu-20.04 + needs: [deploy-fhir, deploy-smart-fhir] + if: ${{ (needs.deploy-fhir.result=='success') && (needs.deploy-smart-fhir.result=='success') }} + steps: + - uses: actions/checkout@v3 + with: + token: ${{ secrets.BOT_TOKEN }} + fetch-depth: 0 + + # use merge commit instead of squash + - name: Temporarily disable branch protection on stage + uses: octokit/request-action@v2.x + with: + route: DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins + owner: aws-solutions + repo: fhir-works-on-aws + branch: stage + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: enable merge commits + uses: octokit/request-action@v2.x + with: + route: PATCH /repos/{owner}/{repo} + owner: aws-solutions + repo: fhir-works-on-aws + allow_merge_commit: true + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: Merge to stage + run: | + echo "Set the COMMIT_ID to merge to stage, to current workflow commit id" + COMMIT_ID=$GITHUB_SHA + git checkout stage + echo + echo " Attempting to merge the 'develop' branch ($COMMIT_ID)" + echo " into the 'stage' branch ($(git log -1 --pretty=%H stage))" + echo + git merge $COMMIT_ID --ff-only --no-edit + git push origin stage + + - name: disable merge commits + 
uses: octokit/request-action@v2.x + if: always() # Make sure to disable merge commit even if other steps fail + with: + route: PATCH /repos/{owner}/{repo} + owner: aws-solutions + repo: fhir-works-on-aws + allow_merge_commit: false + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: Enable branch protection on stage + uses: octokit/request-action@v2.x + if: always() # Make sure to enable branch protection even if other steps fail + with: + route: POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins + owner: aws-solutions + repo: fhir-works-on-aws + branch: stage + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + slack-notification-tests-failure: + runs-on: ubuntu-20.04 + needs: [deploy-fhir, deploy-smart-fhir] + if: failure() + steps: + - name: Slack notification when tests in deployment or smart-deployment failed + id: slack + uses: slackapi/slack-github-action@v1.23.0 + with: + payload: | + { + "slack_message": "On ${{ github.ref_name }} branch of ${{ github.repository }}:", + "deploy_result": "${{ needs.deploy-fhir.result }}", + "smart_deploy_result": "${{ needs.deploy-smart-fhir.result }}", + "workflow_url": "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_INTEGRATION_WEBHOOK_URL }} diff --git a/.github/workflows/no-response.yaml b/.github/workflows/no-response.yaml new file mode 100644 index 00000000..ca115f0a --- /dev/null +++ b/.github/workflows/no-response.yaml @@ -0,0 +1,25 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# + +name: No Response + +# Both `issue_comment` and `scheduled` event types are required for this Action +# to work properly. 
+on: + issue_comment: + types: [created] + schedule: + # Schedule for five minutes after the hour, every hour + - cron: '5 * * * *' + +jobs: + noResponse: + runs-on: ubuntu-latest + steps: + - uses: lee-dohm/no-response@v0.5.0 + with: + token: ${{ github.token }} + responseRequiredLabel: closing-soon-if-no-response + daysUntilClose: 7 diff --git a/.github/workflows/pnpm-audit-periodically-with-notification.yml b/.github/workflows/pnpm-audit-periodically-with-notification.yml new file mode 100644 index 00000000..912d69ff --- /dev/null +++ b/.github/workflows/pnpm-audit-periodically-with-notification.yml @@ -0,0 +1,29 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# + +name: pnpm-audit-schedule-with-notification +on: + schedule: + - cron: '0 15 * * 1-5' +jobs: + pnpm-audit-work: + uses: + ./.github/workflows/pnpm-audit-pr.yml + slack-notification: + needs: pnpm-audit-work + runs-on: ubuntu-20.04 + if: failure() + steps: + - name: Slack notification when pnpm-audit-work fails + id: slack + uses: slackapi/slack-github-action@v1.23.0 + with: + payload: | + { + "slack_message": "On ${{ github.ref_name }} branch of ${{ github.repository }}:\nGithub pnpm audit workflow result: ${{ needs.pnpm-audit-work.result }}", + "workflow_url": "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} \ No newline at end of file diff --git a/.github/workflows/pnpm-audit-pr.yml b/.github/workflows/pnpm-audit-pr.yml new file mode 100644 index 00000000..258849c8 --- /dev/null +++ b/.github/workflows/pnpm-audit-pr.yml @@ -0,0 +1,55 @@ +# +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +# + +name: pnpm-audit +on: + pull_request: + types: [opened, synchronize, edited, reopened] + workflow_call: +jobs: + pnpm-audit: + runs-on: ubuntu-20.04 + steps: + - name: Checkout repository + uses: actions/checkout@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Use Node.js 18.x + uses: actions/setup-node@v3 + with: + node-version: '18.x' + + - name: Cache Rush + uses: actions/cache@v3 + with: + path: | + common/temp/install-run + ~/.rush + key: ${{ runner.os }}-rush-${{ hashFiles('rush.json') }} + restore-keys: | + ${{ runner.os }}-rush- + ${{ runner.os }}- + - name: Cache pnpm + uses: actions/cache@v3 + with: + path: | + common/temp/pnpm-store + key: ${{ runner.os }}-pnpm-${{ hashFiles('common/config/rush/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm- + ${{ runner.os }}- + - name: install pnpm + uses: pnpm/action-setup@v2.2.2 + with: + version: 6.32 + + - name: Install dependecies + run: node common/scripts/install-run-rush.js install + + - name: pnpm-audit + run: | + cd common/temp + pnpm audit --prod \ No newline at end of file diff --git a/.github/workflows/publish-and-merge-to-develop.yml b/.github/workflows/publish-and-merge-to-develop.yml new file mode 100644 index 00000000..39abcc73 --- /dev/null +++ b/.github/workflows/publish-and-merge-to-develop.yml @@ -0,0 +1,98 @@ +name: Publish-and-merge-to-develop +on: + pull_request_target: + types: + - closed + branches: + - main + +jobs: + rush-publish: + if: github.event.pull_request.merged == true && vars.PUBLISH_TO_NPM == 'true' + runs-on: ubuntu-20.04 + steps: + - name: git checkout + uses: actions/checkout@v3 + with: + token: ${{ secrets.BOT_TOKEN }} + fetch-depth: 0 + + - name: Base Action + uses: ./.github/actions/baseAction + + - name: Set git config + run: | + git config user.email ${{ vars.BOT_USER_EMAIL }} + git config user.name ${{ vars.BOT_USER_NAME }} + + - name: Rush Publish + run: | + PUBLISH_CMD="--add-commit-details --apply 
--npm-auth-token ${{ secrets.NPM_AUTH_TOKEN }} --ignore-git-hooks --include-all --set-access-level public --target-branch main --publish" + node common/scripts/install-run-rush.js publish $PUBLISH_CMD + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + merge-main-to-develop: + name: Merge main to develop + runs-on: ubuntu-20.04 + needs: [rush-publish] + steps: + - uses: actions/checkout@v3 + with: + token: ${{ secrets.BOT_TOKEN }} + fetch-depth: 0 + + # use merge commit instead of squash + - name: Temporarily disable branch protection on develop + uses: octokit/request-action@v2.x + with: + route: DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins + owner: aws-solutions + repo: fhir-works-on-aws + branch: develop + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: enable merge commits + uses: octokit/request-action@v2.x + with: + route: PATCH /repos/{owner}/{repo} + owner: aws-solutions + repo: fhir-works-on-aws + allow_merge_commit: true + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: Merge to develop + run: | + echo "Set the COMMIT_ID to merge to develop, to current workflow commit id" + COMMIT_ID=$GITHUB_SHA + git checkout develop + echo + echo " Attempting to merge the 'main' branch ($COMMIT_ID)" + echo " into the 'develop' branch ($(git log -1 --pretty=%H develop))" + echo + git merge $COMMIT_ID --ff-only --no-edit + git push origin develop + + - name: disable merge commits + uses: octokit/request-action@v2.x + if: always() # Make sure to disable merge commit even if other steps fail + with: + route: PATCH /repos/{owner}/{repo} + owner: aws-solutions + repo: fhir-works-on-aws + allow_merge_commit: false + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} + + - name: Enable branch protection on develop + uses: octokit/request-action@v2.x + if: always() # Make sure to enable branch protection even if other steps fail + with: + route: POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins + owner: 
aws-solutions + repo: fhir-works-on-aws + branch: develop + env: + GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} diff --git a/.github/workflows/smart-integ-inferno-test-periodically.yaml b/.github/workflows/smart-integ-inferno-test-periodically.yaml new file mode 100644 index 00000000..42e6262e --- /dev/null +++ b/.github/workflows/smart-integ-inferno-test-periodically.yaml @@ -0,0 +1,147 @@ +name: (SMART) Unit Tests, Integration Test periodically +on: + schedule: + - cron: '0 13 1,15 * *' + +jobs: + inferno-test: + name: Run Inferno Tests - enableMultiTenancy=${{ matrix.enableMultiTenancy }} + runs-on: ubuntu-20.04 + strategy: + fail-fast: false + matrix: + include: + - enableMultiTenancy: false + region: us-east-1 + serviceUrlSuffix: '' + smartServiceURLSecretName: SMART_SERVICE_URL + - enableMultiTenancy: true + region: us-east-2 + serviceUrlSuffix: /tenant/tenant1 + smartServiceURLSecretName: MULTITENANCY_SMART_SERVICE_URL + permissions: + id-token: write + contents: read + steps: + - uses: actions/checkout@v3 + with: + repository: nguyen102/inferno + ref: fhir-works + - uses: actions/setup-ruby@v1 + with: + ruby-version: '2.6' + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: ${{ matrix.region }} + role-to-assume: ${{ secrets.AWS_ACCESS_ROLE_ARN }} + role-duration-seconds: 7200 + - name: Install dependency + run: | + gem install bundler + bundle install + - name: Execute Inferno tests + env: + SERVICE_URL: ${{ secrets[matrix.smartServiceURLSecretName]}}${{ matrix.serviceUrlSuffix }} + CLIENT_ID: ${{ secrets.SMART_AUTH_CLIENT_ID}} + CLIENT_SECRET: ${{ secrets.SMART_AUTH_CLIENT_SECRET }} + AUTH_ENDPOINT: ${{ secrets.SMART_AUTH_ENDPOINT }} + TOKEN_ENDPOINT: ${{ secrets.SMART_TOKEN_ENDPOINT}} + AUTH_USERNAME: ${{ secrets.SMART_AUTH_USERNAME}} + AUTH_PASSWORD: ${{ secrets.SMART_AUTH_PASSWORD}} + run: | + cp fhir-works-example.json fhir-works.json + sed -i -e "s#SERVER_ENDPOINT#$SERVICE_URL#g" fhir-works.json + sed 
-i -e "s#CLIENT_ID#$CLIENT_ID#g" fhir-works.json + sed -i -e "s#CLIENT_SECRET#$CLIENT_SECRET#g" fhir-works.json + sed -i -e "s#AUTH_ENDPOINT#$AUTH_ENDPOINT#g" fhir-works.json + sed -i -e "s#TOKEN_ENDPOINT#$TOKEN_ENDPOINT#g" fhir-works.json + sed -i -e "s#AUTH_USERNAME#$AUTH_USERNAME#g" fhir-works.json + sed -i -e "s#AUTH_PASSWORD#$AUTH_PASSWORD#g" fhir-works.json + sed -i -e "s/okta-signin-username/input28/g" fhir-works.json + sed -i -e "s/okta-signin-password/input36/g" fhir-works.json + sed -i -e "s/okta-signin-submit/button-primary/g" fhir-works.json + sed -i -e "14 s/id/class/" fhir-works.json + bundle exec rake db:create db:schema:load + bundle exec rake inferno:execute_batch[fhir-works.json] + custom-integration-tests: + needs: inferno-test + name: Run custom integration tests - enableMultiTenancy=${{ matrix.enableMultiTenancy }} + environment: FWoA Integ Test Env + runs-on: ubuntu-20.04 + strategy: + matrix: + include: + - enableMultiTenancy: false + region: us-east-1 + smartOauth2ApiEndpointSecretName: SMART_OAUTH2_API_ENDPOINT + smartAuthUsernameSecretName: SMART_AUTH_USERNAME + smartAuthAdminUsernameSecretName: SMART_AUTH_ADMIN_USERNAME + smartServiceURLSecretName: SMART_SERVICE_URL + smartApiKeySecretName: SMART_API_KEY + subscriptionsNotificationsTableSecretName: SMART_SUBSCRIPTIONS_NOTIFICATIONS_TABLE + subscriptionsEndpointSecretName: SMART_SUBSCRIPTIONS_ENDPOINT + subscriptionsApiKeySecretName: SMART_SUBSCRIPTIONS_API_KEY + - enableMultiTenancy: true + region: us-east-2 + smartOauth2ApiEndpointSecretName: MULTITENANCY_SMART_OAUTH2_API_ENDPOINT + smartAuthUsernameSecretName: MULTITENANCY_SMART_AUTH_USERNAME + smartAuthAdminUsernameSecretName: MULTITENANCY_SMART_AUTH_ADMIN_USERNAME + smartServiceURLSecretName: MULTITENANCY_SMART_SERVICE_URL + smartApiKeySecretName: MULTITENANCY_SMART_API_KEY + subscriptionsNotificationsTableSecretName: MULTITENANCY_SMART_SUBSCRIPTIONS_NOTIFICATIONS_TABLE + subscriptionsEndpointSecretName: 
MULTITENANCY_SMART_SUBSCRIPTIONS_ENDPOINT + subscriptionsApiKeySecretName: MULTITENANCY_SMART_SUBSCRIPTIONS_API_KEY + permissions: + id-token: write + contents: read + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: ${{ matrix.region }} + role-to-assume: ${{ secrets.AWS_ACCESS_ROLE_ARN }} + role-duration-seconds: 7200 + - name: Base Action + uses: ./.github/actions/baseAction + - name: Execute tests + env: + SMART_OAUTH2_API_ENDPOINT: ${{ secrets[matrix.smartOauth2ApiEndpointSecretName] }} + SMART_INTEGRATION_TEST_CLIENT_ID: ${{ secrets.SMART_INTEGRATION_TEST_CLIENT_ID}} + SMART_INTEGRATION_TEST_CLIENT_PW: ${{ secrets.SMART_INTEGRATION_TEST_CLIENT_PW}} + SMART_AUTH_USERNAME: ${{ secrets[matrix.smartAuthUsernameSecretName] }} + SMART_AUTH_ADMIN_USERNAME: ${{ secrets[matrix.smartAuthAdminUsernameSecretName] }} + SMART_AUTH_ADMIN_ANOTHER_TENANT_USERNAME: ${{ secrets.SMART_AUTH_ADMIN_ANOTHER_TENANT_USERNAME}} + SMART_AUTH_PASSWORD: ${{ secrets.SMART_AUTH_PASSWORD}} + SMART_SERVICE_URL: ${{ secrets[matrix.smartServiceURLSecretName] }} + SMART_API_KEY: ${{ secrets[matrix.smartApiKeySecretName] }} + MULTI_TENANCY_ENABLED: ${{ matrix.enableMultiTenancy }} + SUBSCRIPTIONS_ENABLED: 'true' + SUBSCRIPTIONS_NOTIFICATIONS_TABLE: ${{ secrets[matrix.subscriptionsNotificationsTableSecretName] }} + SUBSCRIPTIONS_ENDPOINT: ${{ secrets[matrix.subscriptionsEndpointSecretName] }} + SUBSCRIPTIONS_API_KEY: ${{ secrets[matrix.subscriptionsApiKeySecretName] }} + AWS_REGION: ${{ matrix.region }} + API_AWS_REGION: ${{ matrix.region }} + run: | + cd ./solutions/smart-deployment + node ../../common/scripts/install-run-rushx.js int-test + slack-notification: + needs: [inferno-test, custom-integration-tests] + runs-on: ubuntu-20.04 + if: failure() + steps: + - name: Slack notification when inferno tests or integration tests failed + id: slack + uses: 
slackapi/slack-github-action@v1.23.0 + with: + payload: | + { + "slack_message": "On ${{ github.ref_name }} branch of ${{ github.repository }}:", + "inferno_result": "${{ needs.inferno-test.result }}", + "integration_result": "${{ needs.custom-integration-tests.result }}", + "workflow_url": "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_SCHEDULED_INTEGRATION_WEBHOOK_URL }} diff --git a/.github/workflows/solutions-pipeline.yml b/.github/workflows/solutions-pipeline.yml new file mode 100644 index 00000000..a268ce52 --- /dev/null +++ b/.github/workflows/solutions-pipeline.yml @@ -0,0 +1,26 @@ +name: Solutions Pipeline Workflow + +env: + REGION: us-east-1 + +on: push + +jobs: + pipeline-job: + name: Trigger Solutions Pipeline + if: github.repository_owner == 'aws-solutions' + runs-on: ubuntu-latest + permissions: + id-token: write + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: ${{ secrets.DISPATCHER_ROLE_ARN }} + aws-region: ${{ env.REGION }} + role-duration-seconds: 900 + role-session-name: OIDCSession + - name: Run CodeBuild + uses: aws-actions/aws-codebuild-run-build@v1 + with: + project-name: ${{ secrets.DISPATCHER_CODEBUILD_PROJECT_NAME }} \ No newline at end of file diff --git a/.github/workflows/solutions-pull-request.yml b/.github/workflows/solutions-pull-request.yml new file mode 100644 index 00000000..2ad54dac --- /dev/null +++ b/.github/workflows/solutions-pull-request.yml @@ -0,0 +1,23 @@ +name: Solutions Pull Request + +on: + pull_request: + types: [opened, edited, reopened, synchronize] + +jobs: + pull-request-job: + name: Viperlight Scan + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Viperlight + run: | + wget -q https://viperlight-scanner.s3.amazonaws.com/latest/.viperlightrc + wget -q https://viperlight-scanner.s3.amazonaws.com/latest/viperlight.zip 
+ unzip -q viperlight.zip -d ../viperlight + rm -r ./viperlight.zip + echo "Content scanning utility installation complete `date`" + echo "Starting content scanning `date` in `pwd`" + ../viperlight/bin/viperlight scan -m files-contents -m files-aws -m files-binary -m files-entropy -m files-secrets + echo "Completed content scanning `date`" \ No newline at end of file diff --git a/.github/workflows/verify-changes.yml b/.github/workflows/verify-changes.yml new file mode 100644 index 00000000..01f93b55 --- /dev/null +++ b/.github/workflows/verify-changes.yml @@ -0,0 +1,23 @@ +name: Verify Changefiles +on: + pull_request: + types: [opened, synchronize, edited, reopened] + branches: + - develop + +jobs: + run-rush-change-verify: + runs-on: ubuntu-20.04 + steps: + - name: Use Node.js 18.x + uses: actions/setup-node@v3 + with: + node-version: '18.x' + - uses: actions/checkout@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + fetch-depth: 0 + - name: Rush verify changes + run: | + echo "checking if all changefiles were created" + node common/scripts/install-run-rush.js change --verify -b origin/develop \ No newline at end of file diff --git a/.viperlightrc b/.viperlightrc deleted file mode 100644 index 071e1c37..00000000 --- a/.viperlightrc +++ /dev/null @@ -1,4 +0,0 @@ -{ - "all": true, - "failOn": "medium" -} \ No newline at end of file diff --git a/brazil.ion b/brazil.ion deleted file mode 100644 index b17b4333..00000000 --- a/brazil.ion +++ /dev/null @@ -1,33 +0,0 @@ -'brazil_package_spec@1.0' - -common::{ - name: "FHIR-Works-on-AWS", - major_version: "1.0", - - dependencies: { - default_closure: run, - - closures: { - run: public::{ - include: [self], - }, - }, - }, - - build: { - command: null, - - env: { - PATH: [ - (env PATH), - ], - }, - - outputs: { - public_dir: null, - private_dir: null, - }, - - cleaned: [], - }, -} diff --git a/fwoa-tools/src/migrationExport.test.ts b/fwoa-tools/src/migrationExport.test.ts index 70e28e96..74216f70 100644 --- 
a/fwoa-tools/src/migrationExport.test.ts +++ b/fwoa-tools/src/migrationExport.test.ts @@ -4,78 +4,80 @@ */ jest.mock('./migrationUtils', () => ({ checkConfiguration: () => {} })); jest.mock('./exportHelper', () => ({ - startExportJob: () => { - return { jobId: 'fakeJobId1', jobRunId: 'fakeJobRunId1' }; - }, - getExportStatus: () => {}, - getExportStateFile: () => { - return { - jobId: 'fakeJobId-1', - file_names: ['file1', 'file2'] - }; - } + startExportJob: () => {return {jobId: 'fakeJobId1', jobRunId: 'fakeJobRunId1'}}, + getExportStatus: () => {}, + getExportStateFile: () => {return { + jobId: 'fakeJobId-1', + file_names: ['file1', 'file2'] + }} })); -import { buildRunScriptParams, parseCmdOptions, runScript } from './migrationExport'; +import {buildRunScriptParams, parseCmdOptions, runScript} from "./migrationExport"; describe('migrationExport', () => { - describe('parseCmdOptions', () => { - test('smart, dryrun, and since enabled', () => { - process.argv = [ - '/usr/local/bin/ts-node', - 'migrationExport.ts', - '-s', - '-d', - '-t', - '1800-01-01T00:00:00.000Z' - ]; + describe('parseCmdOptions', () => { + test('smart, dryrun, and since enabled', () => { + process.argv = [ + '/usr/local/bin/ts-node', + 'migrationExport.ts', + '-s', + '-d', + '-t', + '1800-01-01T00:00:00.000Z' + ]; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const argv: any = parseCmdOptions(); - expect(argv.dryRun).toEqual(true); - expect(argv.smart).toEqual(true); - expect(argv.since).toEqual('1800-01-01T00:00:00.000Z'); - }); - test('smart, dryrun, and since not enabled', () => { - process.argv = ['/usr/local/bin/ts-node', 'migrationExport.ts']; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const argv: any = parseCmdOptions(); + expect(argv.dryRun).toEqual(true); + expect(argv.smart).toEqual(true); + expect(argv.since).toEqual("1800-01-01T00:00:00.000Z"); + }); + test('smart, dryrun, and since not enabled', () => { + process.argv = [ + 
'/usr/local/bin/ts-node', + 'migrationExport.ts' + ]; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const argv: any = parseCmdOptions(); - expect(argv.dryRun).toEqual(false); - expect(argv.smart).toEqual(false); - expect(argv.since).toBeNull(); - }); - }); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const argv: any = parseCmdOptions(); + expect(argv.dryRun).toEqual(false); + expect(argv.smart).toEqual(false); + expect(argv.since).toBeNull(); + }); + }) - test('buildRunScriptParams', () => { - process.argv = ['/usr/local/bin/ts-node', 'migrationExport.ts']; - const { smartClient, dryRun, since, snapshotExists, snapshotLocation } = buildRunScriptParams(); - expect(dryRun).toEqual(false); - expect(smartClient).toEqual(false); - expect(snapshotExists).toEqual(false); - expect(snapshotLocation).toBeNull(); - expect(since).toBeNull(); - }); - describe('runScript', () => { - test('smartClient - false', async () => { - await expect( - runScript(false, false, '1800-01-01T00:00:00.000Z', false, '') - ).resolves.not.toThrowError(); - }); - test('smartClient - true', async () => { - await expect(runScript(true, false, '1800-01-01T00:00:00.000Z', false, '')).resolves.not.toThrowError(); - }); - test('runScript - tenantId', async () => { - process.env.MIGRATION_TENANT_ID = 'tenant1'; - await expect( - runScript(false, false, '1800-01-01T00:00:00.000Z', false, '') - ).resolves.not.toThrowError(); - }); - test('runScript - invalidSinceDate', async () => { - process.argv = ['/usr/local/bin/ts-node', 'migrationExport.ts', '-t', 'abc']; - await expect(runScript(false, false, 'incorrectSinceDate', false, '')).rejects.toThrowError( - 'Provided since timestamp not in correct format (ISO 8601)' - ); - }); - }); -}); + test ('buildRunScriptParams', () => { + process.argv = [ + '/usr/local/bin/ts-node', + 'migrationExport.ts' + ]; + const {smartClient, dryRun, since, snapshotExists, snapshotLocation} = buildRunScriptParams(); + 
expect(dryRun).toEqual(false); + expect(smartClient).toEqual(false); + expect(snapshotExists).toEqual(false); + expect(snapshotLocation).toBeNull(); + expect(since).toBeNull(); + }) + describe('runScript', () => { + test('smartClient - false', async() => { + await expect(runScript(false, false, '1800-01-01T00:00:00.000Z', false, "" )).resolves.not.toThrowError(); + }) + test('smartClient - true', async() => { + await expect(runScript(true, false, '1800-01-01T00:00:00.000Z', false, "" )).resolves.not.toThrowError(); + }) + test('runScript - tenantId', async() => { + process.env.MIGRATION_TENANT_ID = "tenant1"; + await expect(runScript(false, false, '1800-01-01T00:00:00.000Z', false, "" )).resolves.not.toThrowError(); + }) + test('runScript - invalidSinceDate', async() => { + process.argv = [ + '/usr/local/bin/ts-node', + 'migrationExport.ts', + '-t', + 'abc' + ]; + await expect(runScript(false, false, 'incorrectSinceDate', false, "" )) + .rejects.toThrowError('Provided since timestamp not in correct format (ISO 8601)'); + }) + }) +}) \ No newline at end of file diff --git a/fwoa-tools/src/migrationVerify.test.ts b/fwoa-tools/src/migrationVerify.test.ts index 891f15df..16b166eb 100644 --- a/fwoa-tools/src/migrationVerify.test.ts +++ b/fwoa-tools/src/migrationVerify.test.ts @@ -4,24 +4,24 @@ */ jest.mock('./migrationUtils', () => ({ - checkConfiguration: () => {}, - getFhirClientSMART: () => { - //eslint-disable-next-line @typescript-eslint/no-use-before-define - return axios.create(); - }, - getFhirClient: () => { - //eslint-disable-next-line @typescript-eslint/no-use-before-define - return axios.create(); - } + checkConfiguration: () => {}, + getFhirClientSMART: () => { + //eslint-disable-next-line @typescript-eslint/no-use-before-define + return axios.create(); + }, + getFhirClient: () => { + //eslint-disable-next-line @typescript-eslint/no-use-before-define + return axios.create(); + } })); import AWS from 'aws-sdk'; -import { GetObjectRequest } from 
'aws-sdk/clients/s3'; +import { GetObjectRequest } from "aws-sdk/clients/s3"; import * as AWSMock from 'aws-sdk-mock'; -import axios from 'axios'; -import MockAdapter from 'axios-mock-adapter'; -import { logs } from './migrationImport'; -import { parseCmdOptions, buildRunScriptParams, runScript, verifyResource } from './migrationVerify'; +import axios from "axios"; +import MockAdapter from "axios-mock-adapter"; +import {logs} from "./migrationImport"; +import {parseCmdOptions, buildRunScriptParams, runScript, verifyResource} from "./migrationVerify"; let mock: MockAdapter; const env = process.env; @@ -29,124 +29,133 @@ const argv = process.argv; AWSMock.setSDKInstance(AWS); describe('migrationVerify', () => { - beforeAll(() => { - jest.spyOn(logs, 'write').mockImplementation((log: string) => { - console.log(log); - return true; + beforeAll(() => { + jest.spyOn(logs, 'write').mockImplementation((log: string) => { + console.log(log); + return true; + }); + jest.spyOn(logs, 'end').mockImplementation(jest.fn()); }); - jest.spyOn(logs, 'end').mockImplementation(jest.fn()); - }); - beforeEach(() => { - mock = new MockAdapter(axios); - AWSMock.restore(); - }); - afterEach(() => { - mock.reset(); - AWSMock.restore(); - process.env = env; - process.argv = argv; - }); - describe('parseCmdOptions', () => { - test('smart and dryrun enabled', () => { - process.argv = ['/usr/local/bin/ts-node', 'migrationVerify.ts', '-s', '-d']; + beforeEach(() => { + mock = new MockAdapter(axios); + AWSMock.restore(); + }) + afterEach(() => { + mock.reset(); + AWSMock.restore(); + process.env = env; + process.argv = argv; + }) + describe('parseCmdOptions', () => { + test('smart and dryrun enabled', () => { + process.argv = [ + '/usr/local/bin/ts-node', + 'migrationVerify.ts', + '-s', + '-d', + ]; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const argv: any = parseCmdOptions(); - expect(argv.dryRun).toEqual(true); - expect(argv.smart).toEqual(true); - }); - test('smart and 
dryrun not enabled', () => { - process.argv = ['/usr/local/bin/ts-node', 'migrationVerify.ts']; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const argv: any = parseCmdOptions(); + expect(argv.dryRun).toEqual(true); + expect(argv.smart).toEqual(true); + }); + test('smart and dryrun not enabled', () => { + process.argv = [ + '/usr/local/bin/ts-node', + 'migrationVerify.ts' + ]; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const argv: any = parseCmdOptions(); - expect(argv.dryRun).toEqual(false); - expect(argv.smart).toEqual(false); - }); - }); - test('buildRunScriptParams', () => { - process.argv = ['/usr/local/bin/ts-node', 'migrationExport.ts']; - const { smartClient, dryRun } = buildRunScriptParams(); - expect(dryRun).toEqual(false); - expect(smartClient).toEqual(false); - }); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const argv: any = parseCmdOptions(); + expect(argv.dryRun).toEqual(false); + expect(argv.smart).toEqual(false); + }); + }) + test ('buildRunScriptParams', () => { + process.argv = [ + '/usr/local/bin/ts-node', + 'migrationExport.ts' + ]; + const {smartClient, dryRun} = buildRunScriptParams(); + expect(dryRun).toEqual(false); + expect(smartClient).toEqual(false); + }) - test('runScript', async () => { - process.env.EXPORT_BUCKET_NAME = 'fake-bucket-name'; - const fakeFileBody = JSON.stringify([ - { - resourceType: 'Patient', - id: 'unit_test_patient', - meta: { - tag: [] - } - }, - { - resourceType: 'Patient', - id: 'unit_test_patient2', - meta: { - tag: [] - } - } - ]); + test('runScript', async() => { + process.env.EXPORT_BUCKET_NAME = 'fake-bucket-name' + const fakeFileBody = JSON.stringify([ + { + "resourceType": "Patient", + "id": "unit_test_patient", + "meta": { + "tag": [] + } + }, + { + "resourceType": "Patient", + "id": "unit_test_patient2", + "meta": { + "tag": [] + } + } + ]); - AWSMock.mock( - 'S3', - 'getObject', - // eslint-disable-next-line @typescript-eslint/ban-types 
- (params: GetObjectRequest, callback: Function) => { - expect(params.Key).toBe('Patient/Patient-0.ndjson'); - callback(null, { Body: fakeFileBody, $response: {} }); - } - ); - await expect( - runScript(true, false, { - jobId: 'fakeJobId-1', - file_names: { Patient: ['Patient/Patient-0.ndjson'] } - }) - ).resolves; - }); - describe('verifyResource', () => { - test('nonBinary', async () => { - const fhirClient = axios.create(); - const healthLakeResource = { - resourceType: 'Patient', - id: 'unit_test_patient', - meta: { - tag: [] - } - }; - const fhirResource = { - resourceType: 'Patient', - id: 'unit_test_patient', - meta: { - tag: [] - } - }; - mock.onGet(/.*/g).reply(200, fhirResource); + AWSMock.mock( + 'S3', + 'getObject', + // eslint-disable-next-line @typescript-eslint/ban-types + (params: GetObjectRequest, callback: Function) => { + expect(params.Key).toBe('Patient/Patient-0.ndjson'); + callback(null, { Body: fakeFileBody, $response: {} }); + } + ); + await expect(runScript(true, false, { + jobId: 'fakeJobId-1', + file_names: + { Patient: ['Patient/Patient-0.ndjson'] } + })).resolves; + }) + describe('verifyResource', () => { + test('nonBinary', (async() => { + const fhirClient = axios.create(); + const healthLakeResource = { + "resourceType": "Patient", + "id": "unit_test_patient", + "meta": { + "tag": [] + } + }; + const fhirResource = { + "resourceType": "Patient", + "id": "unit_test_patient", + "meta": { + "tag": [] + } + }; + mock.onGet(/.*/g).reply(200, fhirResource); - await expect( - verifyResource(fhirClient, healthLakeResource, 'unit_test_patient', 'Patient') - ).resolves.toEqual(true); - }); + await expect(verifyResource(fhirClient, healthLakeResource, 'unit_test_patient', 'Patient')) + .resolves.toEqual(true); + })) - test('Binary', async () => { - const fhirClient = axios.create(); - const healthLakeResource = { - resourceType: 'Binary', - id: 'unit_test_binary', - data: 'fakeBinaryResourceData' - }; - const fhirResource = { - resourceType: 
'Binary', - id: 'unit_test_binary', - presignedGetUrl: 'fakePresignedUrl' - }; - mock.onGet(/.*/g).reply(200, fhirResource); + test('Binary', (async() => { + const fhirClient = axios.create(); + const healthLakeResource = { + "resourceType": "Binary", + "id": "unit_test_binary", + "data": "fakeBinaryResourceData" + }; + const fhirResource = { + "resourceType": "Binary", + "id": "unit_test_binary", + "presignedGetUrl": "fakePresignedUrl" + }; + mock.onGet(/.*/g).reply(200, fhirResource); - await expect( - verifyResource(fhirClient, healthLakeResource, 'unit_test_binary', 'Binary') - ).resolves.toEqual(true); - }); - }); -}); + await expect(verifyResource(fhirClient, healthLakeResource, 'unit_test_binary', 'Binary')) + .resolves.toEqual(true); + })) + + }) +}) \ No newline at end of file diff --git a/fwoa-tools/src/migrationVerify.ts b/fwoa-tools/src/migrationVerify.ts index 0c797c30..3285cda9 100644 --- a/fwoa-tools/src/migrationVerify.ts +++ b/fwoa-tools/src/migrationVerify.ts @@ -127,19 +127,15 @@ export async function verifyFolderImport(smartClient: boolean, outputFile: Expor } } -export function buildRunScriptParams(): { smartClient: boolean; dryRun: boolean } { +export function buildRunScriptParams(): {smartClient: boolean, dryRun: boolean} { // eslint-disable-next-line @typescript-eslint/no-explicit-any const argv: any = parseCmdOptions(); const smartClient: boolean = argv.smart; const dryRun: boolean = argv.dryRun; - return { smartClient, dryRun }; + return {smartClient, dryRun}; } -export async function runScript( - smartClient: boolean, - dryRun: boolean, - outputFile: ExportOutput -): Promise { +export async function runScript(smartClient: boolean, dryRun: boolean, outputFile: ExportOutput): Promise { await checkConfiguration(logs, smartClient ? 
'Smart' : 'Cognito'); if (!dryRun) { try { @@ -156,7 +152,7 @@ export async function runScript( (async () => { // Don't runScript when code is being imported for unit tests if (!process.env.UNIT_TEST) { - const { smartClient, dryRun } = buildRunScriptParams(); + const {smartClient, dryRun} = buildRunScriptParams(); // eslint-disable-next-line security/detect-non-literal-fs-filename const outputFile: ExportOutput = JSON.parse(readFileSync(EXPORT_STATE_FILE_NAME).toString()); await runScript(smartClient, dryRun, outputFile);