From c2d02ba2230f8150bc658db6f89eff65c65efba0 Mon Sep 17 00:00:00 2001 From: Nicolas Vuillamy Date: Mon, 18 Sep 2023 09:29:38 +0200 Subject: [PATCH] Slack integration (#422) * beta * stealth * Button to job * rhaaa * Allow multiple channels * fix * Fix slack actions * doc * Fix url link * again * PR info * branch * unfurl * find PR * PR info * prinfo * dbg * fix * fix * I'm walkng on sunshine * MegaLinter * Walking on sunshiiiiiine wohoooo * Author login * View PR * changelog * Reformat slack message + rename folder :) * dx * Factorize repoOwner & repoName * [Mega-Linter] Apply linters fixes :) * Slack notifs for gitlab * Send only on deployment success * cspell * [Mega-Linter] Apply linters fixes --------- Co-authored-by: nvuillam --- .github/linters/.cspell.json | 8 +- CHANGELOG.md | 10 +- README.md | 348 +++++++++--------- docs/deployTips.md | 14 +- docs/hardis/project/deploy/sources/dx.md | 4 +- ...alesforce-ci-cd-setup-integration-slack.md | 37 ++ ...alesforce-ci-cd-setup-integrations-home.md | 6 +- mkdocs.yml | 3 +- package.json | 1 + .../hardis/project/deploy/sources/dx.ts | 48 ++- src/common/gitProvider/gitProviderRoot.ts | 15 + src/common/gitProvider/github.ts | 123 ++++++- src/common/gitProvider/gitlab.ts | 49 ++- src/common/gitProvider/index.ts | 24 ++ src/common/notifProvider/index.ts | 31 ++ src/common/notifProvider/notifProviderRoot.ts | 16 + src/common/notifProvider/slackProvider.ts | 85 +++++ src/common/utils/classUtils.ts | 13 +- yarn.lock | 111 ++++++ 19 files changed, 725 insertions(+), 221 deletions(-) create mode 100644 docs/salesforce-ci-cd-setup-integration-slack.md create mode 100644 src/common/notifProvider/index.ts create mode 100644 src/common/notifProvider/notifProviderRoot.ts create mode 100644 src/common/notifProvider/slackProvider.ts diff --git a/.github/linters/.cspell.json b/.github/linters/.cspell.json index e54fc9681..dcbbe05cb 100644 --- a/.github/linters/.cspell.json +++ b/.github/linters/.cspell.json @@ -44,11 +44,11 @@ "FSEDS", "Facturation", "Flexi", + "GHSA", "GITLEAKS", + "GRYPE", "Gagne", - "GHSA", "Gmail", - "GRYPE", "HADOLINT", "Hardis", "IVMA", @@ -92,11 +92,11 @@ "SAST", "SEMGREP", "SOMENAMESPACE", + "STYLELINT", "Scontrol", "Scratches", "Sfdc", "Sfdx", - "STYLELINT", "Sublicensing", "Suspendre", "Syst\u00e8me", @@ -385,6 +385,7 @@ "ignoreerrors", "ignorerights", "ignorewarnings", + "iids", "includemanaged", "includepackages", "includeprofiles", @@ -449,6 +450,7 @@ "minimumapiversion", "mkdir", "mkdocs", + "mrkdwn", "multiselect", "mutingpermissionset", "mutingpermissionsets", diff --git a/CHANGELOG.md b/CHANGELOG.md index fbedcb63a..036584b7d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,10 +4,14 @@ Note: Can be used with `sfdx plugins:install sfdx-hardis@beta` and docker image `hardisgroupcom/sfdx-hardis@beta` -- Native BitBucket compliance for PR deployment checks and deployments to major orgs after merge +- sfdx-hardis & Slack Integration + - Easy configuration + - Deployment notifications to a common channel, and also to git branch dedicated channel -- Added new option --testlevel RunRepositoryTests which will dynamically detect all GIT repository test classes and runs the deployment with found tests. 
This will speed up the validation/deployment on cases where GIT repository module contains subset of all tests found in the org -- Added --runtests support in order to pass certain APEX test classes when --testlevel RunSpecifiedTests is used +- **hardis:project:deploy:dx** enhancements: + - Added new option --testlevel RunRepositoryTests which will dynamically detect all GIT repository test classes and runs the deployment with found tests. This will speed up the validation/deployment on cases where GIT repository module contains subset of all tests found in the org + - Added --runtests support in order to pass certain APEX test classes when --testlevel RunSpecifiedTests is used +- Native BitBucket compliance for PR deployment checks and deployments to major orgs after merge ## [4.5.1] 2023-09-11 diff --git a/README.md b/README.md index 48d9bcfe3..32b18f58a 100644 --- a/README.md +++ b/README.md @@ -985,7 +985,7 @@ DESCRIPTION - listViewsMine ``` - - Example of sfdx-hardis.yml property `listViewsToSetToMine`: +- Example of sfdx-hardis.yml property `listViewsToSetToMine`: ```yaml listViewsToSetToMine: @@ -997,7 +997,7 @@ DESCRIPTION - "force-app/main/default/objects/Account/listViews/MyActivePartners.listView-meta.xml" ``` - - If manually written, this could also be: +- If manually written, this could also be: ```yaml listViewsToSetToMine: @@ -1009,7 +1009,7 @@ DESCRIPTION - "Account:MyActivePartners" ``` - Troubleshooting: if you need to run this command from an alpine-linux based docker image, use this workaround in your + Troubleshooting: if you need to run this command from an alpine-linux based docker image, use this workaround in your dockerfile: ```dockerfile @@ -1033,8 +1033,8 @@ Generates full org package.xml, including managed items ``` USAGE - $ sfdx hardis:org:generate:packagexmlfull [--outputfile ] [-d] [--websocket ] [--skipauth] [-u - ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:generate:packagexmlfull [--outputfile ] [-d] [--websocket ] [--skipauth] [-u + ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1073,7 +1073,7 @@ Purge apex logs in selected org ``` USAGE - $ sfdx hardis:org:purge:apexlog [-z] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] + $ sfdx hardis:org:purge:apexlog [-z] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1112,8 +1112,8 @@ Purge Obsolete flow versions to avoid the 50 max versions limit. 
Filters on Stat ``` USAGE - $ sfdx hardis:org:purge:flow [-z] [-n ] [-s ] [-f] [-r ] [-d] [--websocket ] - [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:purge:flow [-z] [-n ] [-s ] [-f] [-r ] [-d] [--websocket ] + [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1162,7 +1162,7 @@ EXAMPLES ID MASTERLABEL VERSIONNUMBER DESCRIPTION STATUS 30109000000kX7uAAE TestFlow 2 test flowwww Obsolete - $ sfdx hardis:org:purge:flow --targetusername nicolas.vuillamy@gmail.com --status "Obsolete,Draft,InvalidDraft --name + $ sfdx hardis:org:purge:flow --targetusername nicolas.vuillamy@gmail.com --status "Obsolete,Draft,InvalidDraft --name TestFlow" Found 4 records: ID MASTERLABEL VERSIONNUMBER DESCRIPTION STATUS @@ -1182,7 +1182,7 @@ Retrieve package configuration from an org ``` USAGE - $ sfdx hardis:org:retrieve:packageconfig [-d] [--websocket ] [--skipauth] [-u ] [--apiversion + $ sfdx hardis:org:retrieve:packageconfig [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1218,7 +1218,7 @@ Retrieve all CRM Analytics sources from an org, with workarounds for SFDX bugs ``` USAGE - $ sfdx hardis:org:retrieve:sources:analytics [-d] [--websocket ] [--skipauth] [-u ] [--apiversion + $ sfdx hardis:org:retrieve:sources:analytics [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1253,8 +1253,8 @@ Retrieve Salesforce DX project from org ``` USAGE - $ sfdx hardis:org:retrieve:sources:dx [-f ] [-t ] [-k ] [-m ] [-o] [-r ] [-d] - [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:retrieve:sources:dx [-f ] [-t ] [-k ] [-m ] [-o] [-r ] [-d] + [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1304,8 +1304,8 @@ Retrieve Salesforce DX project from org ``` USAGE - $ sfdx hardis:org:retrieve:sources:dx2 [-x ] [-t ] [-d] [--websocket ] [--skipauth] [-u - ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:retrieve:sources:dx2 [-x ] [-t ] [-d] [--websocket ] [--skipauth] [-u + ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1345,8 +1345,8 @@ Retrieve Salesforce DX project from org ``` USAGE - $ sfdx hardis:org:retrieve:sources:metadata [-f ] [-p ] [--includemanaged] [-r ] [-d] - [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:retrieve:sources:metadata [-f ] [-p ] [--includemanaged] [-r ] [-d] + [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1387,9 +1387,9 @@ Retrieve changes from org link to a ref branch not present in sources ``` USAGE - $ sfdx hardis:org:retrieve:sources:retrofit [--commit] [--commitmode updated|all] [--push] [--pushmode - default|mergerequest] [--productionbranch ] [--retrofittargetbranch ] [-d] [--websocket ] - [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:retrieve:sources:retrofit [--commit] [--commitmode updated|all] [--push] [--pushmode + default|mergerequest] [--productionbranch ] [--retrofittargetbranch ] [-d] [--websocket ] + [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel 
trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1438,8 +1438,8 @@ OPTIONS Hardis UI integration DESCRIPTION - This command need to be triggered from a branch that is connected to a SF org. It will then retrieve all changes not - present in that branch sources, commit them and create a merge request against the default branch. If a merge request + This command need to be triggered from a branch that is connected to a SF org. It will then retrieve all changes not + present in that branch sources, commit them and create a merge request against the default branch. If a merge request already exists, it will simply add a new commit. Define the following properties in **.sfdx-hardis.yml** @@ -1492,7 +1492,7 @@ DESCRIPTION EXAMPLES $ sfdx hardis:org:retrieve:sources:retrofit sfdx hardis:org:retrieve:sources:retrofit --productionbranch master --commit --commitmode updated - sfdx hardis:org:retrieve:sources:retrofit --productionbranch master --retrofitbranch preprod --commit --commitmode + sfdx hardis:org:retrieve:sources:retrofit --productionbranch master --retrofitbranch preprod --commit --commitmode updated --push --pushmode mergerequest ``` @@ -1504,7 +1504,7 @@ Interactive org selection for user ``` USAGE - $ sfdx hardis:org:select [-h] [-s] [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:org:select [-h] [-s] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1537,8 +1537,8 @@ Run apex tests in Salesforce org ``` USAGE - $ sfdx hardis:org:test:apex [-l NoTestRun|RunSpecifiedTests|RunLocalTests|RunAllTestsInOrg] [-d] [--websocket - ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:test:apex [-l NoTestRun|RunSpecifiedTests|RunLocalTests|RunAllTestsInOrg] [-d] [--websocket + ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1568,8 +1568,8 @@ OPTIONS DESCRIPTION If following configuration is defined, it will fail if apex coverage target is not reached: - - Env `APEX_TESTS_MIN_COVERAGE_ORG_WIDE` or `.sfdx-hardis` property `apexTestsMinCoverageOrgWide` - - Env `APEX_TESTS_MIN_COVERAGE_ORG_WIDE` or `.sfdx-hardis` property `apexTestsMinCoverageOrgWide` +- Env `APEX_TESTS_MIN_COVERAGE_ORG_WIDE` or `.sfdx-hardis` property `apexTestsMinCoverageOrgWide` +- Env `APEX_TESTS_MIN_COVERAGE_ORG_WIDE` or `.sfdx-hardis` property `apexTestsMinCoverageOrgWide` You can override env var SFDX_TEST_WAIT_MINUTES to wait more than 60 minutes @@ -1585,7 +1585,7 @@ Update sandbox users so their email is valid ``` USAGE - $ sfdx hardis:org:user:activateinvalid [-p ] [-d] [--websocket ] [--skipauth] [-u ] + $ sfdx hardis:org:user:activateinvalid [-p ] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1618,14 +1618,14 @@ DESCRIPTION See article below - [![Reactivate all the sandbox users with .invalid emails in 3 + [![Reactivate all the sandbox users with .invalid emails in 3 clicks](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-invalid-email.jpg)](https:// nicolas.vuillamy.fr/reactivate-all-the-sandbox-users-with-invalid-emails-in-3-clicks-2265af4e3a3d) EXAMPLES $ sfdx hardis:org:user:activateinvalid $ sfdx hardis:org:user:activateinvalid --targetusername myuser@myorg.com - $ sfdx hardis:org:user:activateinvalid --profiles 'System 
Administrator,MyCustomProfile' --targetusername + $ sfdx hardis:org:user:activateinvalid --profiles 'System Administrator,MyCustomProfile' --targetusername myuser@myorg.com ``` @@ -1637,8 +1637,8 @@ Mass freeze users in org before a maintenance or go live ``` USAGE - $ sfdx hardis:org:user:freeze [-n ] [-p ] [-e ] [-m ] [-d] [--websocket ] - [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:user:freeze [-n ] [-p ] [-e ] [-m ] [-d] [--websocket ] + [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1677,7 +1677,7 @@ DESCRIPTION - [![How to freeze / unfreeze users during a Salesforce + [![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://med ium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3) @@ -1696,8 +1696,8 @@ Mass unfreeze users in org after a maintenance or go live ``` USAGE - $ sfdx hardis:org:user:unfreeze [-n ] [-p ] [-e ] [-m ] [-d] [--websocket ] - [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:org:user:unfreeze [-n ] [-p ] [-e ] [-m ] [-d] [--websocket ] + [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1736,7 +1736,7 @@ DESCRIPTION - [![How to freeze / unfreeze users during a Salesforce + [![How to freeze / unfreeze users during a Salesforce deployment](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-freeze.jpg)](https://med ium.com/@dimitrimonge/freeze-unfreeze-users-during-salesforce-deployment-8a1488bf8dd3) @@ -1755,7 +1755,7 @@ Create a new package ``` USAGE - $ sfdx hardis:package:create [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] + $ sfdx hardis:package:create [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1790,7 +1790,7 @@ Install a package in an org using its id (starting with **04t**) ``` USAGE - $ sfdx hardis:package:install [-p ] [-d] [--websocket ] [-k ] [--skipauth] [-u ] + $ sfdx hardis:package:install [-p ] [-d] [--websocket ] [-k ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1834,7 +1834,7 @@ Select and merge package.xml files ``` USAGE - $ sfdx hardis:package:mergexml [-f ] [-p ] [-x ] [-r ] [--websocket ] + $ sfdx hardis:package:mergexml [-f ] [-p ] [-x ] [-r ] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1846,7 +1846,7 @@ OPTIONS -r, --result=result Result package.xml file name - -x, --pattern=pattern [default: /**/*package*.xml] Name + -x, --pattern=pattern [default: /**/_package_.xml] Name criteria to list package.xml files --json format output as json @@ -1863,7 +1863,7 @@ OPTIONS EXAMPLES $ sfdx hardis:package:mergexml $ sfdx hardis:package:mergexml --folder packages --pattern /**/*.xml --result myMergedPackage.xml - $ sfdx hardis:package:mergexml --packagexmls "config/mypackage1.xml,config/mypackage2.xml,config/mypackage3.xml" + $ sfdx hardis:package:mergexml --packagexmls "config/mypackage1.xml,config/mypackage2.xml,config/mypackage3.xml" --result myMergedPackage.xml ``` @@ -1875,8 +1875,8 @@ Create a new version of an unlocked package ``` USAGE - $ 
sfdx hardis:package:version:create [-d] [-p ] [-k ] [--deleteafter] [-i] [--websocket ] - [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:package:version:create [-d] [-p ] [-k ] [--deleteafter] [-i] [--websocket ] + [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1923,7 +1923,7 @@ List versions of unlocked package ``` USAGE - $ sfdx hardis:package:version:list [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] + $ sfdx hardis:package:version:list [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1958,7 +1958,7 @@ Promote package(s) version(s): convert it from beta to released ``` USAGE - $ sfdx hardis:package:version:promote [-d] [-d] [--websocket ] [--skipauth] [-v ] [--apiversion + $ sfdx hardis:package:version:promote [-d] [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -1997,7 +1997,7 @@ Audit API version ``` USAGE - $ sfdx hardis:project:audit:apiversion [-m ] [-f] [-d] [--websocket ] [--skipauth] [--json] + $ sfdx hardis:project:audit:apiversion [-m ] [-f] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2032,7 +2032,7 @@ Generate list of callIn and callouts from sfdx project ``` USAGE - $ sfdx hardis:project:audit:callincallout [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:audit:callincallout [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2092,7 +2092,7 @@ Generate list of remote sites ``` USAGE - $ sfdx hardis:project:audit:remotesites [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:audit:remotesites [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2120,7 +2120,7 @@ Remove unwanted empty items within sfdx project sources ``` USAGE - $ sfdx hardis:project:clean:emptyitems [-f ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:clean:emptyitems [-f ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2149,7 +2149,7 @@ Remove unwanted hidden items within sfdx project sources ``` USAGE - $ sfdx hardis:project:clean:hiddenitems [-f ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:clean:hiddenitems [-f ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2178,7 +2178,7 @@ Replace Mine by Everything in ListView, and log the replacements in sfdx-hardis. 
``` USAGE - $ sfdx hardis:project:clean:listviews [-f ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:clean:listviews [-f ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2237,7 +2237,7 @@ Remove all profile attributes that exist on Permission Sets ``` USAGE - $ sfdx hardis:project:clean:minimizeprofiles [-f ] [-d] [--websocket ] [--skipauth] [--json] + $ sfdx hardis:project:clean:minimizeprofiles [-f ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2259,18 +2259,18 @@ DESCRIPTION Salesforce will deprecate such capability in Spring 26. - Don't wait for that, and use minimizeProfiles cleaning to automatically remove from Profiles any permission that + Don't wait for that, and use minimizeProfiles cleaning to automatically remove from Profiles any permission that exists on a Permission Set ! The following XML tags are removed automatically: - - classAccesses - - customMetadataTypeAccesses - - externalDataSourceAccesses - - fieldPermissions - - objectPermissions - - pageAccesses - - userPermissions (except on Admin Profile) +- classAccesses +- customMetadataTypeAccesses +- externalDataSourceAccesses +- fieldPermissions +- objectPermissions +- pageAccesses +- userPermissions (except on Admin Profile) You can override this list by defining a property minimizeProfilesNodesToRemove in your .sfdx-hardis.yml config file. @@ -2286,7 +2286,7 @@ Clean SFDX sources from items present neither in target org nor local package.xm ``` USAGE - $ sfdx hardis:project:clean:orgmissingitems [-f ] [-p ] [-t ] [-d] [--websocket ] + $ sfdx hardis:project:clean:orgmissingitems [-f ] [-p ] [-t ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2329,7 +2329,7 @@ Remove unwanted references within sfdx project sources ``` USAGE - $ sfdx hardis:project:clean:references [-t ] [-c ] [-d] [--websocket ] [--skipauth] [--json] + $ sfdx hardis:project:clean:references [-t ] [-c ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2369,7 +2369,7 @@ Retrieve dashboards, documents and report folders in DX sources. 
Use -u ORGALIAS ``` USAGE - $ sfdx hardis:project:clean:retrievefolders [-d] [--websocket ] [--skipauth] [-u ] [--apiversion + $ sfdx hardis:project:clean:retrievefolders [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2404,7 +2404,7 @@ Remove unwanted standard items within sfdx project sources ``` USAGE - $ sfdx hardis:project:clean:standarditems [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:clean:standarditems [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2432,7 +2432,7 @@ Clean System.debug() lines in APEX Code (classes and triggers) ``` USAGE - $ sfdx hardis:project:clean:systemdebug [-f ] [--websocket ] [--skipauth] [-d] [--json] [--loglevel + $ sfdx hardis:project:clean:systemdebug [-f ] [--websocket ] [--skipauth] [-d] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2461,7 +2461,7 @@ Remove XML elements using Glob patterns and XPath expressions ``` USAGE - $ sfdx hardis:project:clean:xml [-f ] [-p -x ] [-n ] [-d] [--websocket ] + $ sfdx hardis:project:clean:xml [-f ] [-p -x ] [-n ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2493,13 +2493,13 @@ OPTIONS DESCRIPTION This can be very useful to avoid to always remove manually the same elements in the same XML file. - - **globpattern** can be any glob pattern allowing to identify the XML files to update, for example +- **globpattern** can be any glob pattern allowing to identify the XML files to update, for example `/**/*.flexipage-meta.xml` - - **xpath** can be any xpath following the format `//ns:PARENT-TAG-NAME//ns:TAG-NAME[contains(text(),'TAG-VALUE')]`. +- **xpath** can be any xpath following the format `//ns:PARENT-TAG-NAME//ns:TAG-NAME[contains(text(),'TAG-VALUE')]`. If an element is found, the whole **PARENT-TAG-NAME** (with its subtree) will be removed. 
- ![How to build cleaning + ![How to build cleaning XPath](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/doc-clean-xml.jpg) Note: If globpattern and xpath are not sent, elements defined in property **cleanXmlPatterns** in **.sfdx-hardis.yml** @@ -2507,7 +2507,7 @@ DESCRIPTION EXAMPLES $ sfdx hardis:project:clean:xml - $ sfdx hardis:project:clean:xml --globpattern "/**/*.flexipage-meta.xml" --xpath + $ sfdx hardis:project:clean:xml --globpattern "/**/*.flexipage-meta.xml" --xpath "//ns:flexiPageRegions//ns:name[contains(text(),'dashboardName')]" ``` @@ -2519,7 +2519,7 @@ Configure authentication from git branch to target org ``` USAGE - $ sfdx hardis:project:configure:auth [-b] [-d] [--websocket ] [--skipauth] [-v ] [-u ] + $ sfdx hardis:project:configure:auth [-b] [-d] [--websocket ] [--skipauth] [-v ] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2558,7 +2558,7 @@ Creates permission sets from existing profiles, with id PS_PROFILENAME ``` USAGE - $ sfdx hardis:project:convert:profilestopermsets [-e ] [-d] [--websocket ] [--skipauth] [--json] + $ sfdx hardis:project:convert:profilestopermsets [-e ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2587,7 +2587,7 @@ Create a new SFDX Project ``` USAGE - $ sfdx hardis:project:create [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:create [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2615,8 +2615,8 @@ Deploy SFDX source to org, following deploymentPlan in .sfdx-hardis.yml ``` USAGE - $ sfdx hardis:project:deploy:sources:dx [-c] [-l NoTestRun|RunSpecifiedTests|RunLocalTests|RunAllTestsInOrg] [-p - ] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel + $ sfdx hardis:project:deploy:sources:dx [-c] [-l NoTestRun|RunSpecifiedTests|RunLocalTests|RunAllTestsInOrg] [-p + ] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2650,41 +2650,41 @@ OPTIONS Hardis UI integration DESCRIPTION - In case of errors, [tips to fix them](https://sfdx-hardis.cloudity.com/deployTips/) will be included within the error + In case of errors, [tips to fix them](https://sfdx-hardis.cloudity.com/deployTips/) will be included within the error messages. 
- ### Quick Deploy +### Quick Deploy - In case Pull Request comments are configured on the project, Quick Deploy will try to be used (equivalent to button + In case Pull Request comments are configured on the project, Quick Deploy will try to be used (equivalent to button Quick Deploy) If you do not want to use QuickDeploy, define variable `SFDX_HARDIS_QUICK_DEPLOY=false` - - [GitHub Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-github/) - - [Gitlab Merge requests notes config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-gitlab/) - - [Azure Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-azure/) +- [GitHub Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-github/) +- [Gitlab Merge requests notes config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-gitlab/) +- [Azure Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-azure/) - ### Dynamic deployment items / Overwrite management +### Dynamic deployment items / Overwrite management If necessary,you can define the following files (that supports wildcards *): - - `manifest/packageDeployOnce.xml`: Every element defined in this file will be deployed only if it is not existing yet +- `manifest/packageDeployOnce.xml`: Every element defined in this file will be deployed only if it is not existing yet in the target org (can be useful with ListView for example, if the client wants to update them directly in production org) - - `manifest/packageXmlOnChange.xml`: Every element defined in this file will not be deployed if it already has a +- `manifest/packageXmlOnChange.xml`: Every element defined in this file will not be deployed if it already has a similar definition in target org (can be useful for SharingRules for example) See [Overwrite management documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-config-overwrite/) - ### Deployment plan +### Deployment plan If you need to deploy in multiple steps, you can define a property `deploymentPlan` in `.sfdx-hardis.yml`. 
- - If a file `manifest/package.xml` is found, it will be placed with order 0 in the deployment plan +- If a file `manifest/package.xml` is found, it will be placed with order 0 in the deployment plan - - If a file `manifest/destructiveChanges.xml` is found, it will be executed as --postdestructivechanges +- If a file `manifest/destructiveChanges.xml` is found, it will be executed as --postdestructivechanges - - If env var `SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES` is defined as `false` , split of package.xml will be applied +- If env var `SFDX_HARDIS_DEPLOY_IGNORE_SPLIT_PACKAGES` is defined as `false` , split of package.xml will be applied Example: @@ -2700,13 +2700,13 @@ DESCRIPTION waitAfter: 30 ``` - ### Packages installation +### Packages installation You can define a list of package to install during deployments using property `installedPackages` - - If `INSTALL_PACKAGES_DURING_CHECK_DEPLOY` is defined as `true` (or `installPackagesDuringCheckDeploy: true` in +- If `INSTALL_PACKAGES_DURING_CHECK_DEPLOY` is defined as `true` (or `installPackagesDuringCheckDeploy: true` in `.sfdx-hardis.yml`), packages will be installed even if the command is called with `--check` mode - - You can automatically update this property by listing all packages installed on an org using command `sfdx +- You can automatically update this property by listing all packages installed on an org using command `sfdx hardis:org:retrieve:packageconfig` Example: @@ -2736,11 +2736,11 @@ DESCRIPTION installDuringDeployments: true ``` - ### Automated fixes post deployments +### Automated fixes post deployments - #### List view with scope Mine +#### List view with scope Mine - If you defined a property **listViewsToSetToMine** in your .sfdx-hardis.yml, related ListViews will be set to Mine ( + If you defined a property **listViewsToSetToMine** in your .sfdx-hardis.yml, related ListViews will be set to Mine ( see command ) Example: @@ -2755,7 +2755,7 @@ DESCRIPTION - "Account:MyActivePartners" ``` - Troubleshooting: if you need to fix ListViews with mine from an alpine-linux based docker image, use this workaround + Troubleshooting: if you need to fix ListViews with mine from an alpine-linux based docker image, use this workaround in your dockerfile: ```dockerfile @@ -2766,7 +2766,7 @@ DESCRIPTION ENV PUPPETEER_EXECUTABLE_PATH="$\{CHROMIUM_PATH}" // remove \ before { ``` - If you need to increase the deployment waiting time (force:source:deploy --wait arg), you can define env var + If you need to increase the deployment waiting time (force:source:deploy --wait arg), you can define env var SFDX_DEPLOY_WAIT_MINUTES EXAMPLES @@ -2782,8 +2782,8 @@ Deploy metadatas to source org ``` USAGE - $ sfdx hardis:project:deploy:sources:metadata [-c] [-x ] [-p ] [-f] [-k ] [-l - NoTestRun|RunSpecifiedTests|RunLocalTests|RunAllTestsInOrg] [-d] [--websocket ] [--skipauth] [-u ] + $ sfdx hardis:project:deploy:sources:metadata [-c] [-x ] [-p ] [-f] [-k ] [-l + NoTestRun|RunSpecifiedTests|RunLocalTests|RunAllTestsInOrg] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2834,7 +2834,7 @@ Fix flexipages for apiVersion v53 (Winter22). 
``` USAGE - $ sfdx hardis:project:fix:v53flexipages [-p ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:fix:v53flexipages [-p ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2869,8 +2869,8 @@ Generate package.xml git delta between 2 commits ``` USAGE - $ sfdx hardis:project:generate:gitdelta [--branch ] [--fromcommit ] [--tocommit ] [-d] - [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:generate:gitdelta [--branch ] [--fromcommit ] [--tocommit ] [-d] + [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2902,7 +2902,7 @@ Apply syntactic analysis (linters) on the repository sources, using Mega-Linter ``` USAGE - $ sfdx hardis:project:lint [-f] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] + $ sfdx hardis:project:lint [-f] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2939,7 +2939,7 @@ find duplicate values in XML file(s). ``` USAGE - $ sfdx hardis:project:metadata:findduplicates [-f ] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:project:metadata:findduplicates [-f ] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -2956,7 +2956,7 @@ OPTIONS Hardis UI integration DESCRIPTION - Find duplicate values in XML file(s). Keys to be checked can be configured in `config/sfdx-hardis.yml` using property + Find duplicate values in XML file(s). Keys to be checked can be configured in `config/sfdx-hardis.yml` using property metadataDuplicateFindKeys. 
Default config : @@ -3008,7 +3008,7 @@ Create and initialize a scratch org or a source-tracked sandbox (config can be d ``` USAGE - $ sfdx hardis:scratch:create [-n] [-d] [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] + $ sfdx hardis:scratch:create [-n] [-d] [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3038,15 +3038,15 @@ OPTIONS Hardis UI integration DESCRIPTION - - **Install packages** - - Use property `installedPackages` - - **Push sources** - - **Assign permission sets** - - Use property `initPermissionSets` - - **Run apex initialization scripts** - - Use property `scratchOrgInitApexScripts` - - **Load data** - - Use property `dataPackages` +- **Install packages** + - Use property `installedPackages` +- **Push sources** +- **Assign permission sets** + - Use property `initPermissionSets` +- **Run apex initialization scripts** + - Use property `scratchOrgInitApexScripts` +- **Load data** + - Use property `dataPackages` EXAMPLE $ sfdx hardis:scratch:create @@ -3060,7 +3060,7 @@ Assisted menu to delete scratch orgs associated to a DevHub ``` USAGE - $ sfdx hardis:scratch:delete [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] + $ sfdx hardis:scratch:delete [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3095,7 +3095,7 @@ Select a data storage service and configure information to build a scratch org p ``` USAGE - $ sfdx hardis:scratch:pool:create [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] + $ sfdx hardis:scratch:pool:create [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3142,7 +3142,7 @@ Calls the related storage service to request api keys and secrets that allows a ``` USAGE - $ sfdx hardis:scratch:pool:localauth [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] + $ sfdx hardis:scratch:pool:localauth [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3177,7 +3177,7 @@ Create enough scratch orgs to fill the pool ``` USAGE - $ sfdx hardis:scratch:pool:refresh [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] + $ sfdx hardis:scratch:pool:refresh [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3212,7 +3212,7 @@ Reset scratch org pool (delete all scratches in the pool) ``` USAGE - $ sfdx hardis:scratch:pool:reset [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] + $ sfdx hardis:scratch:pool:reset [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3247,7 +3247,7 @@ Displays all stored content of project scratch org pool if defined ``` USAGE - $ sfdx hardis:scratch:pool:view [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] + $ sfdx hardis:scratch:pool:view [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3282,7 +3282,7 @@ This commands pulls the updates you performed in your scratch or sandbox org, in ``` USAGE - $ sfdx hardis:scratch:pull [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] 
[--json] + $ sfdx hardis:scratch:pull [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3306,16 +3306,16 @@ OPTIONS Hardis UI integration DESCRIPTION - Then, you probably want to stage and commit the files containing the updates you want to keep, as explained in this + Then, you probably want to stage and commit the files containing the updates you want to keep, as explained in this video. - - - Calls sfdx force:source:pull under the hood - - If there are errors, proposes to automatically add erroneous item in `.forceignore`, then pull again - - If you want to always retrieve sources like CustomApplication that are not always detected as updates by +- Calls sfdx force:source:pull under the hood +- If there are errors, proposes to automatically add erroneous item in `.forceignore`, then pull again +- If you want to always retrieve sources like CustomApplication that are not always detected as updates by force:source:pull , you can define property **autoRetrieveWhenPull** in .sfdx-hardis.yml Example: @@ -3338,7 +3338,7 @@ Push local files to scratch org ``` USAGE - $ sfdx hardis:scratch:push [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] + $ sfdx hardis:scratch:push [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3376,11 +3376,11 @@ sfdx-hardis wrapper for sfdx force:source:deploy that displays tips to solve dep ``` USAGE - $ sfdx hardis:source:deploy [--soapdeploy] [-w ] [--predestructivechanges ] - [--postdestructivechanges ] [-f [-t | | [-q | -x | -m | -p | -c | -l - NoTestRun|RunSpecifiedTests|RunLocalTests|RunAllTestsInOrg | -r | -o | -g]]] [--resultsdir ] - [--coverageformatters ] [--junit] [--checkcoverage] [--debug] [--websocket ] [-u ] - [--apiversion ] [--verbose] [--json] [--loglevel + $ sfdx hardis:source:deploy [--soapdeploy] [-w ] [--predestructivechanges ] + [--postdestructivechanges ] [-f [-t | | [-q | -x | -m | -p | -c | -l + NoTestRun|RunSpecifiedTests|RunLocalTests|RunAllTestsInOrg | -r | -o | -g]]] [--resultsdir ] + [--coverageformatters ] [--junit] [--checkcoverage] [--debug] [--websocket ] [-u ] + [--apiversion ] [--verbose] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3431,34 +3431,34 @@ OPTIONS --websocket=websocket websocket DESCRIPTION - Additional to the base command wrapper: If using **--checkonly**, add options **--checkcoverage** and - **--coverageformatters json-summary** to check that org coverage is > 75% (or value defined in .sfdx-hardis.yml + Additional to the base command wrapper: If using **--checkonly**, add options **--checkcoverage** and + **--coverageformatters json-summary** to check that org coverage is > 75% (or value defined in .sfdx-hardis.yml property **apexTestsMinCoverageOrgWide**) You can also have deployment results as pull request comments, on: - - GitHub (see [GitHub Pull Requests comments +- GitHub (see [GitHub Pull Requests comments config](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-github/)) - - Gitlab (see [Gitlab integration +- Gitlab (see [Gitlab integration configuration](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-gitlab/)) - - Azure DevOps (see [Azure integration +- Azure DevOps (see [Azure integration configuration](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-setup-integration-azure/)) - 
[![Assisted solving of Salesforce deployments + [![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](http s://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) Notes: - - You can disable coloring of errors in red by defining env variable SFDX_HARDIS_DEPLOY_ERR_COLORS=false +- You can disable coloring of errors in red by defining env variable SFDX_HARDIS_DEPLOY_ERR_COLORS=false [See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sf dx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_deploy) EXAMPLE - $ sfdx hardis:source:deploy -x manifest/package.xml --wait 60 --ignorewarnings --testlevel RunLocalTests - --postdestructivechanges ./manifest/destructiveChanges.xml --targetusername nicolas.vuillamy@cloudity.com.sfdxhardis + $ sfdx hardis:source:deploy -x manifest/package.xml --wait 60 --ignorewarnings --testlevel RunLocalTests + --postdestructivechanges ./manifest/destructiveChanges.xml --targetusername nicolas.vuillamy@cloudity.com.sfdxhardis --checkonly --checkcoverage --verbose --coverageformatters json-summary ``` @@ -3470,7 +3470,7 @@ sfdx-hardis wrapper for sfdx force:source:push that displays tips to solve deplo ``` USAGE - $ sfdx hardis:source:push [-f] [-w ] [-g] [--debug] [--websocket ] [-u ] [--apiversion + $ sfdx hardis:source:push [-f] [-w ] [-g] [--debug] [--websocket ] [-u ] [--apiversion ] [--quiet] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3497,7 +3497,7 @@ OPTIONS --websocket=websocket websocket DESCRIPTION - [![Assisted solving of Salesforce deployments + [![Assisted solving of Salesforce deployments errors](https://github.com/hardisgroupcom/sfdx-hardis/raw/main/docs/assets/images/article-deployment-errors.jpg)](http s://nicolas.vuillamy.fr/assisted-solving-of-salesforce-deployments-errors-47f3666a9ed0) @@ -3513,8 +3513,8 @@ sfdx-hardis wrapper for sfdx force:source:retrieve ``` USAGE - $ sfdx hardis:source:retrieve [-p | -x | -m ] [-w ] [-n ] [-f -t] [-d] - [--websocket ] [--skipauth] [-u ] [-a ] [--verbose] [--json] [--loglevel + $ sfdx hardis:source:retrieve [-p | -x | -m ] [-w ] [-n ] [-f -t] [-d] + [--websocket ] [--skipauth] [-u ] [-a ] [--verbose] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3553,8 +3553,8 @@ OPTIONS --websocket=websocket websocket DESCRIPTION - - If no retrieve constraint is sent, as assisted menu will request the list of metadatas to retrieve - - If no org is selected , an assisted menu will request the user to choose one +- If no retrieve constraint is sent, as assisted menu will request the list of metadatas to retrieve +- If no org is selected , an assisted menu will request the user to choose one [See documentation of Salesforce command](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sf dx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_retrieve) @@ -3568,7 +3568,7 @@ Assisted menu to start working on a Salesforce task. 
``` USAGE - $ sfdx hardis:work:new [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] + $ sfdx hardis:work:new [-d] [--websocket ] [--skipauth] [-v ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3592,28 +3592,28 @@ OPTIONS Hardis UI integration DESCRIPTION - Advanced instructions in [Create New Task + Advanced instructions in [Create New Task documentation](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-create-new-task/) At the end of the command, it will allow you to work on either a scratch org or a sandbox, depending on your choices. Under the hood, it can: - - Make **git pull** to be up to date with target branch - - Create **new git branch** with formatted name (you can override the choices using .sfdx-hardis.yml property +- Make **git pull** to be up to date with target branch +- Create **new git branch** with formatted name (you can override the choices using .sfdx-hardis.yml property **branchPrefixChoices**) - - Create and initialize a scratch org or a source-tracked sandbox (config can be defined using +- Create and initialize a scratch org or a source-tracked sandbox (config can be defined using `config/.sfdx-hardis.yml`): - - (and for scratch org only for now): - - **Install packages** - - Use property `installedPackages` - - **Push sources** - - **Assign permission sets** - - Use property `initPermissionSets` - - **Run apex initialization scripts** - - Use property `scratchOrgInitApexScripts` - - **Load data** - - Use property `dataPackages` +- (and for scratch org only for now): + - **Install packages** + - Use property `installedPackages` + - **Push sources** + - **Assign permission sets** + - Use property `initPermissionSets` + - **Run apex initialization scripts** + - Use property `scratchOrgInitApexScripts` + - **Load data** + - Use property `dataPackages` EXAMPLE $ sfdx hardis:work:task:new @@ -3627,7 +3627,7 @@ Make my local branch and my scratch org up to date with the most recent sources ``` USAGE - $ sfdx hardis:work:refresh [-n] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] + $ sfdx hardis:work:refresh [-n] [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3665,7 +3665,7 @@ Resets the selection that we want to add in the merge request ``` USAGE - $ sfdx hardis:work:resetselection [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] + $ sfdx hardis:work:resetselection [-d] [--websocket ] [--skipauth] [-u ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3704,7 +3704,7 @@ When a work task is completed, guide user to create a merge request ``` USAGE $ sfdx hardis:work:save [-n] [-g] [-c] [--auto] [--targetbranch ] [-d] [--websocket ] [--skipauth] [-u - ] [--apiversion ] [--json] [--loglevel + ] [--apiversion ] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS @@ -3740,12 +3740,12 @@ OPTIONS DESCRIPTION Advanced instructions in [Publish a task](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-publish-task/) - - Generate package-xml diff using sfdx-git-delta - - Automatically update `manifest/package.xml` and `manifest/destructiveChanges.xml` according to the committed updates - - Automatically Clean XML files using `.sfdx-hardis.yml` properties - - `autocleantypes`: List of auto-performed sources cleanings, available on command +- Generate package-xml 
diff using sfdx-git-delta +- Automatically update `manifest/package.xml` and `manifest/destructiveChanges.xml` according to the committed updates +- Automatically Clean XML files using `.sfdx-hardis.yml` properties + - `autocleantypes`: List of auto-performed sources cleanings, available on command [hardis:project:clean:references](https://sfdx-hardis.cloudity.com/hardis/project/clean/references/) - - `autoRemoveUserPermissions`: List of userPermission to automatically remove from profile metadatas + - `autoRemoveUserPermissions`: List of userPermission to automatically remove from profile metadatas Example: @@ -3767,7 +3767,7 @@ DESCRIPTION - WorkCalibrationUser ``` - - Push commit to server +- Push commit to server EXAMPLES $ sfdx hardis:work:task:save @@ -3782,7 +3782,7 @@ Technical calls to WebSocket functions ``` USAGE - $ sfdx hardis:work:ws [-e ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel + $ sfdx hardis:work:ws [-e ] [-d] [--websocket ] [--skipauth] [--json] [--loglevel trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL] OPTIONS diff --git a/docs/deployTips.md b/docs/deployTips.md index 5b8a17d91..0affeb63b 100644 --- a/docs/deployTips.md +++ b/docs/deployTips.md @@ -8,11 +8,11 @@ description: Learn how to fix issues that can happen during sfdx deployments This page summarizes all errors that can be detected by sfdx-hardis wrapper commands -| sfdx command | sfdx-hardis wrapper command | -| :----------- | :-------------------------- | -| [sfdx force:source:deploy](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_deploy) | [sfdx hardis:source:deploy](https://sfdx-hardis.cloudity.com/hardis/source/deploy/) | -| [sfdx force:source:push](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_push) | [sfdx hardis:source:push](https://sfdx-hardis.cloudity.com/hardis/source/push/) | -| [sfdx force:mdapi:deploy](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_mdapi.htm#cli_reference_force_mdapi_beta_deploy) | [sfdx hardis:mdapi:deploy](https://sfdx-hardis.cloudity.com/hardis/mdapi/deploy/) | +| sfdx command | sfdx-hardis wrapper command | +|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------| +| [sfdx force:source:deploy](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_deploy) | [sfdx hardis:source:deploy](https://sfdx-hardis.cloudity.com/hardis/source/deploy/) | +| [sfdx force:source:push](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_source.htm#cli_reference_force_source_push) | [sfdx hardis:source:push](https://sfdx-hardis.cloudity.com/hardis/source/push/) | +| [sfdx force:mdapi:deploy](https://developer.salesforce.com/docs/atlas.en-us.sfdx_cli_reference.meta/sfdx_cli_reference/cli_reference_force_mdapi.htm#cli_reference_force_mdapi_beta_deploy) | [sfdx hardis:mdapi:deploy](https://sfdx-hardis.cloudity.com/hardis/mdapi/deploy/) | You can also use this function on a [sfdx-hardis Salesforce CI/CD 
project](https://sfdx-hardis.cloudity.com/salesforce-ci-cd-home/) @@ -302,7 +302,7 @@ You probably also need to add CRM Analytics Admin Permission Set assignment to t ## Error parsing file -- `Error (.*) Error parsing file: (.*) ` +- `Error (.*) Error parsing file: (.*)` **Resolution tip** @@ -902,7 +902,7 @@ Please check https://developer.salesforce.com/forums/?id=9060G0000005kVLQAY ## Test classes with 0% coverage -- ` 0%` +- `0%` **Resolution tip** diff --git a/docs/hardis/project/deploy/sources/dx.md b/docs/hardis/project/deploy/sources/dx.md index 4c1ebced5..beaaae1b3 100644 --- a/docs/hardis/project/deploy/sources/dx.md +++ b/docs/hardis/project/deploy/sources/dx.md @@ -101,13 +101,13 @@ ENV PUPPETEER_EXECUTABLE_PATH="$\{CHROMIUM_PATH}" // remove \ before { ## Parameters -| Name | Type | Description | Default | Required | Options | +| Name | Type | Description | Default | Required | Options | |:----------------------|:-------:|:---------------------------------------------------------------------|:-------------:|:--------:|:---------------------------------------------------------------------------------------------:| | apiversion | option | override the api version used for api requests made by this command | | | | | check
-c | boolean | Only checks the deployment, there is no impact on target org | | | |
| debug<br/>-d | boolean | Activate debug mode (more logs) | | | |
| json | boolean | format output as json | | | |
-| loglevel | option | logging level for this command invocation | warn | | trace<br/>debug<br/>info<br/>warn<br/>error<br/>fatal |
+| loglevel | option | logging level for this command invocation | warn | | trace<br/>debug<br/>info<br/>warn<br/>error<br/>fatal |
| packagexml<br/>-p | option | Path to package.xml containing what you want to deploy in target org | | | |
| skipauth | boolean | Skip authentication check when a default username is required | | | |
| targetusername<br/>
-u | option | username or alias for the target org; overrides default target org | | | | diff --git a/docs/salesforce-ci-cd-setup-integration-slack.md b/docs/salesforce-ci-cd-setup-integration-slack.md new file mode 100644 index 000000000..f9459bd70 --- /dev/null +++ b/docs/salesforce-ci-cd-setup-integration-slack.md @@ -0,0 +1,37 @@ +--- +title: Configure Integrations between sfdx-hardis and Slack +description: Send notifications on slack channels during CI/CD operations +--- + + +## Slack Integration + +You can receive notifications on slack channels when CI/CD events are happening: +- Deployment from a major branch to a major Salesforce org (ex: integration git branch to Integration Org) +- More soon (ask for them !) + +## Configure Slack Application + +### Create slack app + +> Process only if a sfdx-hardis bot has not yet been configured on your slack. Otherwise, just request the slack token value to your slack administrator + +Create a slack app here -> + +- Name it `sfdx-hardis bot`` or _any nickname you like_, like your guinea pig name ! +- Go to permissions and add the following scopes + - chat-write + - chat-write.customize + - chat-write.public +- Create auth token and copy its values + +### Configure sfdx-hardis for slack + +- Create a secret value named **SLACK_TOKEN** with auth token value in your Git provider configuration +- Create a slack channel that will receive all notifications (ex: _#notifs-sfdx-hardis_) +- Open the channel info, copy its ID and create a secret value named **SLACK_CHANNEL_ID** in your git provider configuration +- Additionally, you can create branch-scoped channels by creating new channels and create appropriate variables + - Example: Channel _#notifs-sfdx-hardis-integration_ and variable **SLACK_CHANNEL_ID_iNTEGRATION** +- Make sure all those variables are visible to your CI/CD pipelines + +That's all, you're all set ! \ No newline at end of file diff --git a/docs/salesforce-ci-cd-setup-integrations-home.md b/docs/salesforce-ci-cd-setup-integrations-home.md index 4445957d4..44f196e1a 100644 --- a/docs/salesforce-ci-cd-setup-integrations-home.md +++ b/docs/salesforce-ci-cd-setup-integrations-home.md @@ -29,9 +29,11 @@ Depending of your git provider, configure one of the following integrations. ## Message notifications +- [Slack](salesforce-ci-cd-setup-integration-slack.md) + - Notifications + - [Microsoft Teams](salesforce-ci-cd-setup-integration-ms-teams.md) - Notifications - Alerts -- Slack - - Coming soon ! 
+ diff --git a/mkdocs.yml b/mkdocs.yml index fee5deffb..a46ab6e77 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -206,9 +206,10 @@ nav: - Init from Existing Org: salesforce-ci-cd-setup-existing-org.md - Integrations: - Integrations Home: salesforce-ci-cd-setup-integrations-home.md - - GitHub: salesforce-ci-cd-setup-integration-github.md + - GitHub: salesforce-ci-cd-setup-integration-github.md - Gitlab: salesforce-ci-cd-setup-integration-gitlab.md - Azure DevOps: salesforce-ci-cd-setup-integration-azure.md + - Slack: salesforce-ci-cd-setup-integration-slack.md - Microsoft Teams: salesforce-ci-cd-setup-integration-ms-teams.md - First merge request: salesforce-ci-cd-setup-merge-request.md - Configuration reference: sfdx-hardis-config-file.md diff --git a/package.json b/package.json index 8c6204ca4..ff8787acf 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "@salesforce/core": "^2.33.1", "@salesforce/ts-sinon": "^1.2.4", "@salesforce/ts-types": "^1.5.20", + "@slack/web-api": "^6.9.0", "@supercharge/promise-pool": "^1.7.0", "@types/mocha": "^8.2.1", "@types/ws": "^7.4.0", diff --git a/src/commands/hardis/project/deploy/sources/dx.ts b/src/commands/hardis/project/deploy/sources/dx.ts index d53ba657e..0d43554c0 100644 --- a/src/commands/hardis/project/deploy/sources/dx.ts +++ b/src/commands/hardis/project/deploy/sources/dx.ts @@ -17,12 +17,14 @@ import { AnyJson } from "@salesforce/ts-types"; import * as c from "chalk"; import * as fs from "fs-extra"; import { MetadataUtils } from "../../../../../common/metadata-utils"; -import { isCI, uxLog } from "../../../../../common/utils"; +import { getCurrentGitBranch, isCI, uxLog } from "../../../../../common/utils"; import { getConfig } from "../../../../../config"; import { forceSourceDeploy } from "../../../../../common/utils/deployUtils"; import { promptOrg } from "../../../../../common/utils/orgUtils"; import { getApexTestClasses } from "../../../../../common/utils/classUtils"; import { restoreListViewMine } from "../../../../../common/utils/orgConfigUtils"; +import { NotifProvider } from "../../../../../common/notifProvider"; +import { GitProvider } from "../../../../../common/gitProvider"; // Initialize Messages with the current plugin directory Messages.importMessagesDirectory(__dirname); @@ -198,23 +200,23 @@ If you need to increase the deployment waiting time (force:source:deploy --wait let testClasses = this.flags.runtests || this.configInfo.runtests || ""; // Auto-detect all APEX test classes within project in order to run "dynamic" RunSpecifiedTests deployment - if(givenTestlevel === 'RunRepositoryTests') { + if (givenTestlevel === "RunRepositoryTests") { const testClassList = await getApexTestClasses(); - if(Array.isArray(testClassList) && testClassList.length) { - this.flags.testlevel = 'RunSpecifiedTests'; + if (Array.isArray(testClassList) && testClassList.length) { + this.flags.testlevel = "RunSpecifiedTests"; testClasses = testClassList.join(); } else { // Default back to RunLocalTests in case if repository has zero tests - this.flags.testlevel = 'RunLocalTests'; - testClasses = ''; + this.flags.testlevel = "RunLocalTests"; + testClasses = ""; } } const testlevel = this.flags.testlevel || this.configInfo.testLevel || "RunLocalTests"; - + // Test classes are only valid for RunSpecifiedTests - if(testlevel != 'RunSpecifiedTests') { - testClasses = ''; + if (testlevel != "RunSpecifiedTests") { + testClasses = ""; } const packageXml = this.flags.packagexml || null; @@ -257,7 +259,7 @@ If you need to increase the deployment 
waiting time (force:source:deploy --wait const forceSourceDeployOptions: any = { targetUsername: targetUsername, conn: this.org?.getConnection(), - testClasses: testClasses + testClasses: testClasses, }; // Get destructiveChanges.xml and add it in options if existing const packageDeletedXmlFile = @@ -301,6 +303,32 @@ If you need to increase the deployment waiting time (force:source:deploy --wait await restoreListViewMine(this.configInfo.listViewsToSetToMine, this.org.getConnection(), { debug: this.debugMode }); } + // Send notification of deployment success + if (!check) { + const targetLabel = this.org?.getConnection()?.getUsername() === targetUsername ? this.org?.getConnection()?.instanceUrl : targetUsername; + const linkMarkdown = `<${targetLabel}|*${targetLabel.replace("https://", "").replace(".my.salesforce.com", "")}*>`; + const currentGitBranch = await getCurrentGitBranch(); + let branchMd = `*${currentGitBranch}*`; + const branchUrl = await GitProvider.getCurrentBranchUrl(); + if (branchUrl) { + branchMd = `<${branchUrl}|*${currentGitBranch}*>`; + } + let notifMessage = `Deployment has been successfully processed from branch ${branchMd} to org ${linkMarkdown}`; + const notifButtons = []; + const jobUrl = await GitProvider.getJobUrl(); + if (jobUrl) { + notifButtons.push({ text: "View Deployment Job", url: jobUrl }); + } + const pullRequestInfo = await GitProvider.getPullRequestInfo(); + if (pullRequestInfo) { + const prUrl = pullRequestInfo.web_url || pullRequestInfo.html_url || pullRequestInfo.url; + const prAuthor = pullRequestInfo?.author?.login || pullRequestInfo?.author?.name || null; + notifMessage += `\nRelated: <${prUrl}|${pullRequestInfo.title}>` + (prAuthor ? ` by ${prAuthor}` : ""); + const prButtonText = "View Pull Request"; + notifButtons.push({ text: prButtonText, url: prUrl }); + } + NotifProvider.postNotifications(notifMessage, notifButtons); + } return { orgId: this.org.getOrgId(), outputString: messages.join("\n") }; } } diff --git a/src/common/gitProvider/gitProviderRoot.ts b/src/common/gitProvider/gitProviderRoot.ts index 23b0f849e..a4d1eb633 100644 --- a/src/common/gitProvider/gitProviderRoot.ts +++ b/src/common/gitProvider/gitProviderRoot.ts @@ -16,6 +16,21 @@ export abstract class GitProviderRoot { return null; } + public async getCurrentJobUrl(): Promise { + uxLog(this, `Method getCurrentJobUrl is not implemented yet on ${this.getLabel()}`); + return null; + } + + public async getCurrentBranchUrl(): Promise { + uxLog(this, `Method getCurrentBranchUrl is not implemented yet on ${this.getLabel()}`); + return null; + } + + public async getPullRequestInfo(): Promise { + uxLog(this, `Method getPullRequestInfo is not implemented yet on ${this.getLabel()}`); + return null; + } + public async postPullRequestMessage(prMessage: PullRequestMessageRequest): Promise { uxLog(this, c.yellow("Method postPullRequestMessage is not yet implemented on " + this.getLabel() + " to post " + JSON.stringify(prMessage))); return { posted: false, providerResult: { error: "Not implemented in sfdx-hardis" } }; diff --git a/src/common/gitProvider/github.ts b/src/common/gitProvider/github.ts index f0761a0a0..519f858c1 100644 --- a/src/common/gitProvider/github.ts +++ b/src/common/gitProvider/github.ts @@ -1,18 +1,23 @@ import * as github from "@actions/github"; import * as c from "chalk"; import { GitProviderRoot } from "./gitProviderRoot"; -import { uxLog } from "../utils"; +import { getCurrentGitBranch, git, uxLog } from "../utils"; import { PullRequestMessageRequest, 
PullRequestMessageResult } from "."; import { GitHub } from "@actions/github/lib/utils"; export class GithubProvider extends GitProviderRoot { private octokit: InstanceType; + private repoOwner: string; + private repoName: string; constructor() { super(); const tokenName = process.env.CI_SFDX_HARDIS_GITHUB_TOKEN ? "CI_SFDX_HARDIS_GITHUB_TOKEN" : process.env.PAT ? "PAT" : "GITHUB_TOKEN"; const token = process.env[tokenName]; this.octokit = github.getOctokit(token); + this.repoOwner = github?.context?.repo?.owner || null; + this.repoName = github?.context?.repo?.repo || null; + this.serverUrl = github?.context?.serverUrl || null; } public getLabel(): string { @@ -55,20 +60,116 @@ export class GithubProvider extends GitProviderRoot { return deploymentCheckId; } + // Returns current job URL + public async getCurrentJobUrl(): Promise { + try { + const runId = github?.context?.runId; + if (this.repoOwner && this.repoName && this.serverUrl && runId) { + return `${this.serverUrl}/${this.repoOwner}/${this.repoName}/actions/runs/${runId}`; + } + } catch (err) { + uxLog(this, c.yellow("[GitHub integration]" + err.message)); + } + if (process.env.GITHUB_JOB_URL) { + return process.env.GITHUB_JOB_URL; + } + return null; + } + + // Returns current job URL + public async getCurrentBranchUrl(): Promise { + try { + const branch = github?.context?.ref || null; + if (this.repoOwner && this.repoName && this.serverUrl && branch) { + return `${this.serverUrl}/${this.repoOwner}/${this.repoName}/tree/${branch}`; + } + } catch (err) { + uxLog(this, c.yellow("[GitHub integration]" + err.message)); + } + return null; + } + + // Find pull request info + public async getPullRequestInfo(): Promise { + // Case when PR is found in the context + const prNumber = github?.context?.payload?.pull_request?.number || null; + if (prNumber !== null && this.repoOwner !== null && prNumber !== null) { + const pullRequest = await this.octokit.rest.pulls.get({ + owner: this.repoOwner, + repo: this.repoName, + pull_number: prNumber, + }); + if (pullRequest) { + return pullRequest.data; + } + } + // Case when we find PRs from a commit + const sha = await git().revparse(["HEAD"]); + let graphQlRes: any = null; + try { + graphQlRes = await this.octokit.graphql( + ` + query associatedPRs($sha: String, $repo: String!, $owner: String!){ + repository(name: $repo, owner: $owner) { + commit: object(expression: $sha) { + ... 
on Commit { + associatedPullRequests(first:10){ + edges{ + node{ + title + number + body + url + merged, + baseRef { + id + name + } + author { + login + } + } + } + } + } + } + } + } + `, + { + sha: sha, + repo: this.repoName, + owner: this.repoOwner, + }, + ); + } catch (error) { + uxLog(this, c.yellow(`[GitHub Integration] Error while calling GraphQL Api to list PR on commit ${sha}`)); + } + if (graphQlRes?.repository?.commit?.associatedPullRequests?.edges?.length > 0) { + const currentGitBranch = await getCurrentGitBranch(); + const candidatePullRequests = graphQlRes.repository.commit.associatedPullRequests.edges.filter( + (pr: any) => pr.node.merged === true && pr.node.baseRef.name === currentGitBranch, + ); + if (candidatePullRequests.length > 0) { + return candidatePullRequests[0].node; + } + } + uxLog(this, c.grey(`[GitHub Integration] Unable to find related Pull Request Info`)); + return null; + } + // Posts a note on the merge request public async postPullRequestMessage(prMessage: PullRequestMessageRequest): Promise { const { pull_request } = github.context.payload; // Get CI variables - const repoOwner = github?.context?.repo?.owner || null; - const repoName = github?.context?.repo?.repo || null; const pullRequestId = pull_request?.number || null; - if (repoName == null || pullRequestId == null) { + if (this.repoName == null || pullRequestId == null) { uxLog(this, c.grey("[GitHub integration] No project and merge request, so no note posted...")); return { posted: false, providerResult: { info: "No related pull request" } }; } const githubWorkflowName = github.context.workflow; - const githubJobUrl = `${github.context.serverUrl}/${repoOwner}/${repoName}/actions/runs/${github.context.runId}`; + const githubJobUrl = `${github.context.serverUrl}/${this.repoOwner}/${this.repoName}/actions/runs/${github.context.runId}`; // Build note message const messageKey = prMessage.messageKey + "-" + githubWorkflowName + "-" + pullRequestId; let messageBody = `**${prMessage.title || ""}** @@ -85,8 +186,8 @@ _Provided by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${githubW // Check for existing note from a previous run uxLog(this, c.grey("[GitHub integration] Listing comments of Pull Request...")); const existingComments = await this.octokit.rest.issues.listComments({ - owner: repoOwner, - repo: repoName, + owner: this.repoOwner, + repo: this.repoName, issue_number: pullRequestId, }); let existingCommentId = null; @@ -101,8 +202,8 @@ _Provided by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${githubW // Update existing note uxLog(this, c.grey("[GitHub integration] Updating Pull Request Comment on GitHub...")); const githubCommentEditResult = await this.octokit.rest.issues.updateComment({ - owner: repoOwner, - repo: repoName, + owner: this.repoOwner, + repo: this.repoName, issue_number: pullRequestId, comment_id: existingCommentId, body: messageBody, @@ -116,8 +217,8 @@ _Provided by [sfdx-hardis](https://sfdx-hardis.cloudity.com) from job [${githubW // Create new note if no existing not was found uxLog(this, c.grey("[GitHub integration] Adding Pull Request Comment on GitHub...")); const githubCommentCreateResult = await this.octokit.rest.issues.createComment({ - owner: repoOwner, - repo: repoName, + owner: this.repoOwner, + repo: this.repoName, issue_number: pullRequestId, body: messageBody, }); diff --git a/src/common/gitProvider/gitlab.ts b/src/common/gitProvider/gitlab.ts index 657a768a2..04739ea25 100644 --- a/src/common/gitProvider/gitlab.ts +++ 
b/src/common/gitProvider/gitlab.ts @@ -1,7 +1,7 @@ import { Gitlab } from "@gitbeaker/node"; import * as c from "chalk"; import { PullRequestMessageRequest, PullRequestMessageResult } from "."; -import { uxLog } from "../utils"; +import { getCurrentGitBranch, git, uxLog } from "../utils"; import { GitProviderRoot } from "./gitProviderRoot"; export class GitlabProvider extends GitProviderRoot { @@ -20,6 +20,53 @@ export class GitlabProvider extends GitProviderRoot { return "sfdx-hardis Gitlab connector"; } + // Returns current job URL + public async getCurrentJobUrl(): Promise { + if (process.env.CI_JOB_URL) { + return process.env.CI_JOB_URL; + } + return null; + } + + // Returns current job URL + public async getCurrentBranchUrl(): Promise { + if (process.env.CI_PROJECT_URL && process.env.CI_COMMIT_REF_NAME) return `${process.env.CI_PROJECT_URL}/-/tree/${process.env.CI_COMMIT_REF_NAME}`; + return null; + } + + // Find pull request info + public async getPullRequestInfo(): Promise { + // Case when MR is found in the context + const projectId = process.env.CI_PROJECT_ID || null; + const mrNumber = process.env.CI_MERGE_REQUEST_IID || null; + if (mrNumber !== null) { + const mergeRequests = await this.gitlabApi.MergeRequests.all({ + projectId: projectId, + iids: [parseInt(mrNumber)], + }); + if (mergeRequests.length > 0) { + return mergeRequests[0]; + } + } + // Case when we find MR from a commit + const sha = await git().revparse(["HEAD"]); + const latestMergeRequestsOnBranch = await this.gitlabApi.MergeRequests.all({ + projectId: projectId, + state: "merged", + sort: "desc", + sha: sha, + }); + if (latestMergeRequestsOnBranch.length > 0) { + const currentGitBranch = await getCurrentGitBranch(); + const candidateMergeRequests = latestMergeRequestsOnBranch.filter((pr) => pr.target_branch === currentGitBranch); + if (candidateMergeRequests.length > 0) { + return candidateMergeRequests[0]; + } + } + uxLog(this, c.grey(`[Gitlab Integration] Unable to find related Merge Request Info`)); + return null; + } + public async getBranchDeploymentCheckId(gitBranch: string): Promise { let deploymentCheckId = null; const projectId = process.env.CI_PROJECT_ID || null; diff --git a/src/common/gitProvider/index.ts b/src/common/gitProvider/index.ts index 65a44f9ff..675b055d0 100644 --- a/src/common/gitProvider/index.ts +++ b/src/common/gitProvider/index.ts @@ -92,6 +92,30 @@ export abstract class GitProvider { return null; } } + + static async getCurrentBranchUrl(): Promise { + const gitProvider = GitProvider.getInstance(); + if (gitProvider == null) { + return null; + } + return gitProvider.getCurrentBranchUrl(); + } + + static async getJobUrl(): Promise { + const gitProvider = GitProvider.getInstance(); + if (gitProvider == null) { + return null; + } + return gitProvider.getCurrentJobUrl(); + } + + static async getPullRequestInfo(): Promise { + const gitProvider = GitProvider.getInstance(); + if (gitProvider == null) { + return null; + } + return gitProvider.getPullRequestInfo(); + } } export declare type PullRequestMessageRequest = { diff --git a/src/common/notifProvider/index.ts b/src/common/notifProvider/index.ts new file mode 100644 index 000000000..c7e06e91d --- /dev/null +++ b/src/common/notifProvider/index.ts @@ -0,0 +1,31 @@ +import { uxLog } from "../utils"; +import * as c from "chalk"; +import { NotifProviderRoot } from "./notifProviderRoot"; +import { SlackProvider } from "./slackProvider"; + +export abstract class NotifProvider { + static getInstances(): NotifProviderRoot[] { + const 
notifProviders: NotifProviderRoot[] = []; + // Slack + if (process.env.SLACK_TOKEN) { + notifProviders.push(new SlackProvider()); + } + return notifProviders; + } + + static postNotifications(notifMessage: string, buttons: any[] = []) { + const notifProviders = this.getInstances(); + for (const notifProvider of notifProviders) { + notifProvider.postNotification(notifMessage, buttons); + } + } + + public getLabel(): string { + return "get label should be implemented !"; + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + public async postNotification(notifMessage: string, buttons: any[] = []): Promise { + uxLog(this, c.grey("method postNotification is not implemented on " + this.getLabel())); + } +} diff --git a/src/common/notifProvider/notifProviderRoot.ts b/src/common/notifProvider/notifProviderRoot.ts new file mode 100644 index 000000000..9aebfeb74 --- /dev/null +++ b/src/common/notifProvider/notifProviderRoot.ts @@ -0,0 +1,16 @@ +import { SfdxError } from "@salesforce/core"; +import { uxLog } from "../utils"; + +export abstract class NotifProviderRoot { + protected token: string; + + public getLabel(): string { + throw new SfdxError("getLabel should be implemented on this call"); + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + public async postNotification(_notifMessage: string, buttons: any[] = []): Promise { + uxLog(this, `Method postNotification is not implemented yet on ${this.getLabel()}`); + return; + } +} diff --git a/src/common/notifProvider/slackProvider.ts b/src/common/notifProvider/slackProvider.ts new file mode 100644 index 000000000..c9765e35c --- /dev/null +++ b/src/common/notifProvider/slackProvider.ts @@ -0,0 +1,85 @@ +import { SfdxError } from "@salesforce/core"; +import * as c from "chalk"; +import { NotifProviderRoot } from "./notifProviderRoot"; +import { ActionsBlock, Block, Button, SectionBlock, WebClient } from "@slack/web-api"; +import { getCurrentGitBranch, uxLog } from "../utils"; + +export class SlackProvider extends NotifProviderRoot { + private slackClient: InstanceType; + + constructor() { + super(); + this.token = process.env.SLACK_TOKEN; + this.slackClient = new WebClient(this.token); + } + + public getLabel(): string { + return "sfdx-hardis Slack connector"; + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + public async postNotification(notifMessage: string, buttons: any[] = []): Promise { + const mainNotifsChannelId = process.env.SLACK_CHANNEL_ID || null; + if (mainNotifsChannelId == null) { + throw new SfdxError( + "You need to define a variable SLACK_CHANNEL_ID to use sfdx-hardis Slack Integration. 
Otherwise, remove variable SLACK_TOKEN", + ); + } + const slackChannelsIds = [mainNotifsChannelId]; + // Add branch custom slack channel if defined + const customSlackChannelVariable = `SLACK_CHANNEL_ID_${(await getCurrentGitBranch()).toUpperCase()}`; + if (process.env[customSlackChannelVariable]) { + slackChannelsIds.push(process.env[customSlackChannelVariable]); + } + // Main block + const blocks: Block[] = []; + const block: SectionBlock = { + type: "section", + text: { + type: "mrkdwn", + text: notifMessage, + }, + }; + blocks.push(block); + // Add action blocks + if (buttons.length > 0) { + const actionElements = []; + for (const button of buttons) { + // Url button + if (button.url) { + const actionsElement: Button = { + type: "button", + text: { + type: "plain_text", + text: button.text, + }, + style: button.style || "primary", + url: button.url, + }; + actionElements.push(actionsElement); + } + } + const actionsBlock: ActionsBlock = { + type: "actions", + elements: actionElements, + }; + blocks.push(actionsBlock); + } + // Post messages + for (const slackChannelId of slackChannelsIds) { + try { + const resp = await this.slackClient.chat.postMessage({ + text: notifMessage, + blocks: blocks, + channel: slackChannelId, + unfurl_links: false, + unfurl_media: false, + }); + uxLog(this, c.gray(`Sent slack notification to channel ${mainNotifsChannelId}: ${resp.ok}`)); + } catch (error) { + uxLog(this, c.red(`Error while sending message to channel ${mainNotifsChannelId}\n${error.message}`)); + } + } + return; + } +} diff --git a/src/common/utils/classUtils.ts b/src/common/utils/classUtils.ts index 1762abca5..fa4fae9c6 100644 --- a/src/common/utils/classUtils.ts +++ b/src/common/utils/classUtils.ts @@ -2,11 +2,11 @@ import { uxLog } from "."; import * as c from "chalk"; import * as readFilesRecursive from "fs-readdir-recursive"; import * as path from "path"; -import * as fs from 'fs'; +import * as fs from "fs"; function findSubstringInFile(filePath: string, substring: string): Promise { return new Promise((resolve, reject) => { - fs.readFile(filePath, 'utf8', (err, data) => { + fs.readFile(filePath, "utf8", (err, data) => { if (err) { reject(err); return; @@ -20,7 +20,6 @@ function findSubstringInFile(filePath: string, substring: string): Promise=12.0.0" + +"@slack/types@^2.8.0": + version "2.8.0" + resolved "https://registry.yarnpkg.com/@slack/types/-/types-2.8.0.tgz#11ea10872262a7e6f86f54e5bcd4f91e3a41fe91" + integrity sha512-ghdfZSF0b4NC9ckBA8QnQgC9DJw2ZceDq0BIjjRSv6XAZBXJdWgxIsYz0TYnWSiqsKZGH2ZXbj9jYABZdH3OSQ== + +"@slack/web-api@^6.9.0": + version "6.9.0" + resolved "https://registry.yarnpkg.com/@slack/web-api/-/web-api-6.9.0.tgz#d829dcfef490dbce8e338912706b6f39dcde3ad2" + integrity sha512-RME5/F+jvQmZHkoP+ogrDbixq1Ms1mBmylzuWq4sf3f7GCpMPWoiZ+WqWk+sism3vrlveKWIgO9R4Qg9fiRyoQ== + dependencies: + "@slack/logger" "^3.0.0" + "@slack/types" "^2.8.0" + "@types/is-stream" "^1.1.0" + "@types/node" ">=12.0.0" + axios "^0.27.2" + eventemitter3 "^3.1.0" + form-data "^2.5.0" + is-electron "2.2.2" + is-stream "^1.1.0" + p-queue "^6.6.1" + p-retry "^4.0.0" + "@supercharge/promise-pool@^1.7.0": version "1.7.0" resolved "https://registry.npmjs.org/@supercharge/promise-pool/-/promise-pool-1.7.0.tgz" @@ -915,6 +944,13 @@ resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812" integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ== +"@types/is-stream@^1.1.0": + version 
"1.1.0" + resolved "https://registry.yarnpkg.com/@types/is-stream/-/is-stream-1.1.0.tgz#b84d7bb207a210f2af9bed431dc0fbe9c4143be1" + integrity sha512-jkZatu4QVbR60mpIzjINmtS1ZF4a/FqdTUTBeQDVOQ2PYyidtwFKr0B5G6ERukKwliq+7mIXvxyppwzG5EgRYg== + dependencies: + "@types/node" "*" + "@types/jsforce@^1.9.35": version "1.9.38" resolved "https://registry.npmjs.org/@types/jsforce/-/jsforce-1.9.38.tgz" @@ -973,6 +1009,11 @@ resolved "https://registry.npmjs.org/@types/node/-/node-14.14.32.tgz" integrity sha512-/Ctrftx/zp4m8JOujM5ZhwzlWLx22nbQJiVqz8/zE15gOeEW+uly3FSX4fGFpcfEvFzXcMCJwq9lGVWgyARXhg== +"@types/node@>=12.0.0": + version "20.6.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.6.0.tgz#9d7daa855d33d4efec8aea88cd66db1c2f0ebe16" + integrity sha512-najjVq5KN2vsH2U/xyh2opaSEz6cZMR2SetLIlxlj08nOcmPOemJmUK2o4kUzfLqfrWE0PIrNeE16XhYDd3nqg== + "@types/normalize-package-data@^2.4.0": version "2.4.0" resolved "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz" @@ -990,6 +1031,11 @@ dependencies: "@types/node" "*" +"@types/retry@0.12.0": + version "0.12.0" + resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + "@types/semver@^7.3.12": version "7.3.13" resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.13.tgz#da4bfd73f49bd541d28920ab0e2bf0ee80f71c91" @@ -1450,6 +1496,14 @@ axios@^0.21.1: dependencies: follow-redirects "^1.14.0" +axios@^0.27.2: + version "0.27.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.27.2.tgz#207658cc8621606e586c85db4b41a750e756d972" + integrity sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ== + dependencies: + follow-redirects "^1.14.9" + form-data "^4.0.0" + azure-devops-node-api@^12.0.0: version "12.0.0" resolved "https://registry.yarnpkg.com/azure-devops-node-api/-/azure-devops-node-api-12.0.0.tgz#38b9892f88e86da46246218411920923d8dd6a52" @@ -2779,6 +2833,16 @@ event-target-shim@^5.0.0: resolved "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== +eventemitter3@^3.1.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7" + integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== + +eventemitter3@^4.0.4: + version "4.0.7" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + execa@^0.10.0: version "0.10.0" resolved "https://registry.npmjs.org/execa/-/execa-0.10.0.tgz" @@ -3150,6 +3214,11 @@ follow-redirects@^1.14.0: resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz" integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w== +follow-redirects@^1.14.9: + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + for-in@^1.0.2: version "1.0.2" resolved 
"https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz" @@ -3168,6 +3237,15 @@ forever-agent@~0.6.1: resolved "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= +form-data@^2.5.0: + version "2.5.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4" + integrity sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + form-data@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" @@ -4015,6 +4093,11 @@ is-docker@^2.0.0, is-docker@^2.1.1: resolved "https://registry.npmjs.org/is-docker/-/is-docker-2.1.1.tgz" integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== +is-electron@2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-electron/-/is-electron-2.2.2.tgz#3778902a2044d76de98036f5dc58089ac4d80bb9" + integrity sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg== + is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz" @@ -5509,6 +5592,29 @@ p-map@^3.0.0: dependencies: aggregate-error "^3.0.0" +p-queue@^6.6.1: + version "6.6.2" + resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-6.6.2.tgz#2068a9dcf8e67dd0ec3e7a2bcb76810faa85e426" + integrity sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ== + dependencies: + eventemitter3 "^4.0.4" + p-timeout "^3.2.0" + +p-retry@^4.0.0: + version "4.6.2" + resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== + dependencies: + "@types/retry" "0.12.0" + retry "^0.13.1" + +p-timeout@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe" + integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg== + dependencies: + p-finally "^1.0.0" + p-try@^2.0.0, p-try@^2.1.0: version "2.2.0" resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" @@ -6170,6 +6276,11 @@ ret@~0.1.10: resolved "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== +retry@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + reusify@^1.0.4: version "1.0.4" resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz"